I made a filter that textures multiple images onto one model.
I use MapDataArrayToMultiTextureAttribute to implement this.
In the filter's representation, an area is selected on vtkCommand::LeftButtonReleaseEvent.
In the representation's EndWidgetInteraction, the code flow is as follows:
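// Record the end corner of the drag rectangle in display (screen) coordinates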
endScreenPos[0] = eventPos[0];
endScreenPos[1] = eventPos[1];
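// Use a hardware selector to find the cells that are actually visible inside the dragged rectangle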
vtkNew<vtkHardwareSelector> selector;
selector->SetRenderer(this->Renderer);
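// Selection area in display coordinates: (xmin, ymin, xmax, ymax)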
unsigned int size[4];
size[0] = std::min(this->startScreenPos[0], this->endScreenPos[0]);
size[1] = std::min(this->startScreenPos[1], this->endScreenPos[1]);
size[2] = std::max(this->startScreenPos[0], this->endScreenPos[0]);
size[3] = std::max(this->startScreenPos[1], this->endScreenPos[1]);
selector->SetArea(size);
selector->SetFieldAssociation(vtkDataObject::FIELD_ASSOCIATION_CELLS);
// Select() returns a selection that the caller owns, so take ownership to avoid a leak
vtkSmartPointer<vtkSelection> selection = vtkSmartPointer<vtkSelection>::Take(selector->Select());
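// Extract the selected cells from the picked polydata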
vtkNew<vtkExtractSelection> visibleExtractSelection;
visibleExtractSelection->SetInputData(0, this->pickPolydata);
visibleExtractSelection->SetInputData(1, selection);
visibleExtractSelection->Update();
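// vtkExtractSelection outputs a vtkUnstructuredGrid; convert it back to polygonal data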
vtkNew<vtkDataSetSurfaceFilter> surfaceFilter;
surfaceFilter->SetInputConnection(visibleExtractSelection->GetOutputPort());
surfaceFilter->Update();
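// Pick the world-space point at the center of the selection rectangle to use as the texture focal point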
double cameraPos[3];
double textureCenter[3];
this->Renderer->GetActiveCamera()->GetPosition(cameraPos);
double centerScreenPos[2] = { 0 };
centerScreenPos[0] = (startScreenPos[0] + endScreenPos[0]) / 2;
centerScreenPos[1] = (startScreenPos[1] + endScreenPos[1]) / 2;
vtkNew<vtkCellPicker> picker;
picker->Pick(centerScreenPos[0], centerScreenPos[1], 0.0, this->Renderer);
picker->GetPickPosition(textureCenter);
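// distanceA: camera to the picked center; distanceB: picked center to startWorldPos,
// which is presumably the world position recorded when the drag started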
double distanceA = sqrt(vtkMath::Distance2BetweenPoints(cameraPos, textureCenter));
double distanceB = sqrt(vtkMath::Distance2BetweenPoints(textureCenter, startWorldPos));
double aspect[3];
aspect[0] = 1.0;
aspect[1] = 1.0;
aspect[2] = distanceA / distanceB;
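// Project the texture from the camera position toward the picked center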
vtkNew<vtkProjectedTexture> projectedTexture;
projectedTexture->SetAspectRatio(aspect);
projectedTexture->SetPosition(cameraPos);
projectedTexture->SetFocalPoint(textureCenter);
projectedTexture->SetUp(0, 1, 0);
projectedTexture->SetInputData(this->pickPolydata);
projectedTexture->SetTRange(0.0, 1.0);
projectedTexture->SetSRange(0.0, 1.0);
projectedTexture->Update();
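// Give the generated texture coordinates a unique, timestamp-based name so several
// projections can coexist on the same polydata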
time_t timeStamp = time(nullptr);
std::string suffix = std::to_string(timeStamp);
vtkDataArray* tcoord = projectedTexture->GetOutput()->GetPointData()->GetTCoords();
std::string tcoordName = "tcoord" + suffix;
tcoord->SetName(tcoordName.c_str());
pickPolydata->GetPointData()->AddArray(tcoord);
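// Hide the texture on points that are not part of the extracted visible surface by pushing
// their tcoords far outside [0, 1]; the match is a brute-force comparison of exact coordinates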
vtkIdType aCount = this->pickPolydata->GetNumberOfPoints();
vtkPolyData* selectPolyData = surfaceFilter->GetOutput();
vtkIdType tCount = selectPolyData->GetNumberOfPoints();
for (vtkIdType i = 0; i < aCount; i++)
{
  double pi[3] = { 0 };
  pickPolydata->GetPoint(i, pi);
  bool visible = false;
  for (vtkIdType j = 0; j < tCount; j++)
  {
    double pj[3] = { 0 };
    selectPolyData->GetPoint(j, pj);
    if (pi[0] == pj[0] && pi[1] == pj[1] && pi[2] == pj[2])
    {
      visible = true;
      break;
    }
  }
  if (!visible)
  {
    tcoord->SetTuple2(i, -50000.0, -50000.0);
  }
}
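// Load the image and configure the texture; ClampToBorder plus a fully transparent
// border color makes the out-of-range tcoords above render as "no texture"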
const char* file1 = "D:\\demo\\test.jpg";
vtkNew<vtkJPEGReader> reader1;
reader1->SetFileName(file1);
vtkNew<vtkTexture> tex1;
tex1->InterpolateOn();
tex1->SetBlendingMode(vtkTexture::VTK_TEXTURE_BLENDING_MODE_MODULATE);
tex1->SetInputConnection(reader1->GetOutputPort());
tex1->SetColorModeToDirectScalars();
tex1->SetPremultipliedAlpha(false);
tex1->SetWrap(vtkTexture::ClampToBorder);
tex1->SetBorderColor(1, 1, 1, 0);
std::string textureName = "texture" + suffix;
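// Register the texture on the actor's property under a unique name and map the matching
// tcoord array to it for multi-texturing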
pickActor->GetProperty()->SetTexture(textureName.c_str(), tex1);
pickMapper = vtkPolyDataMapper::SafeDownCast(pickActor->GetMapper());
pickMapper->MapDataArrayToMultiTextureAttribute(textureName.c_str(), tcoordName.c_str(), vtkDataObject::FIELD_ASSOCIATION_POINTS);
pickMapper->Update();
Can someone help me find the root cause? Thanks a lot.