Python kernel crashes after 2 min during SaveData

I am using paraview 5.12.1 with python 3.10.4.
I have created a trace from the gui and tried to run it with python.
In the GUI, SaveData takes a long time (about 10 min) with the interface unresponsive, but it finishes.
In Python, the kernel dies after about 2 minutes and the data is not completely exported.
No reason for the crash is given.

# trace generated using paraview version 5.11.0
#import paraview
#paraview.compatibility.major = 5
#paraview.compatibility.minor = 11

import sys

# Make ParaView's bundled Python modules importable from a plain interpreter
# (not needed when the script runs under pvpython/pvbatch).
_PARAVIEW_LIB = r"C:\Program Files\ParaView 5.12.1\bin\Lib"
for _extra_dir in (_PARAVIEW_LIB, _PARAVIEW_LIB + r"\site-packages"):
    sys.path.append(_extra_dir)

# Input/output locations used by the pipeline below.
pvd_file_path = "file.pvd"        # time-series source (PVD collection)
pvd_file_name = "pvd_file"        # registration name shown in the pipeline browser
csv_file_path = "file.csv"        # table of sample-point coordinates
csv_file_name = "csv_file"
data_destination = "results.csv"  # where SaveData writes the resampled output


#### import the simple module from the paraview
# paraview.simple is ParaView's scripted-pipeline API; it is only importable
# because the ParaView lib directories were appended to sys.path above (or
# when running under pvpython/pvbatch, where it is available by default).
from paraview.simple import *
#### disable automatic camera reset on 'Show'
# Private helper emitted by the GUI trace recorder; keeps 'Show' from moving
# the camera. Harmless in a batch script that never renders.
paraview.simple._DisableFirstRenderCameraReset()

# create a new 'PVD Reader'
# Reads the time-series dataset; only the arrays listed below are loaded,
# which limits how much data each pipeline update pulls in.
pvdfile = PVDReader(registrationName=pvd_file_name, FileName=pvd_file_path)
pvdfile.CellArrays = ['ActiveElements']
pvdfile.PointArrays = ['Temperature']
pvdfile.ColumnArrays = []

# get animation scene
animationScene1 = GetAnimationScene()

# get the time-keeper
timeKeeper1 = GetTimeKeeper()

# update animation scene based on data timesteps
animationScene1.UpdateAnimationUsingDataTimeSteps()

# Initial pipeline update at t=0.5 so downstream filters see populated data.
# NOTE(review): time=0.5 was recorded by the trace — confirm it corresponds to
# an actual timestep in the PVD collection.
UpdatePipeline(time=0.5, proxy=pvdfile)

# Create the 'CSV Reader' that supplies the sample-point table.
csvfile = CSVReader(registrationName=csv_file_name, FileName=[csv_file_path])

# Reader configuration, identical to the recorded trace: comma-delimited file
# with a header row and automatic numeric-column detection. setattr() is the
# same operation as the traced attribute assignments, applied in the same order.
for _prop, _value in (
    ("DetectNumericColumns", 1),
    ("UseStringDelimiter", 1),
    ("HaveHeaders", 1),
    ("FieldDelimiterCharacters", ','),
    ("AddTabFieldDelimiter", 0),
    ("MergeConsecutiveDelimiters", 0),
):
    setattr(csvfile, _prop, _value)

UpdatePipeline(time=0.5, proxy=csvfile)

# Create the 'Table To Points' filter: turns the CSV rows into point geometry.
tableToPoints1 = TableToPoints(registrationName='TableToPoints1', Input=csvfile)

# Map the 'x'/'y'/'z' CSV columns onto the point coordinates.
for _axis_prop, _column in zip(("XColumn", "YColumn", "ZColumn"), "xyz"):
    setattr(tableToPoints1, _axis_prop, _column)

tableToPoints1.a2DPoints = 0          # points carry a real Z coordinate
tableToPoints1.KeepAllDataArrays = 0  # do not carry the other CSV columns along

UpdatePipeline(time=0.5, proxy=tableToPoints1)

# create a new 'Resample With Dataset'
# Probes the PVD results (SourceDataArrays) at the CSV point locations
# (DestinationMesh); the output carries the interpolated arrays plus
# vtkValidPointMask marking points that fell inside the source mesh.
resampleWithDataset1 = ResampleWithDataset(registrationName='ResampleWithDataset1', SourceDataArrays=pvdfile,
    DestinationMesh=tableToPoints1)
resampleWithDataset1.CategoricalData = 0
resampleWithDataset1.PassCellArrays = 0
resampleWithDataset1.PassPointArrays = 0
resampleWithDataset1.PassFieldArrays = 1
resampleWithDataset1.PassPartialArrays = 0
resampleWithDataset1.ComputeTolerance = 1
# The explicit value below is double-precision machine epsilon as recorded by
# the trace; presumably it is ignored while ComputeTolerance=1 — verify against
# the ParaView docs before tuning it.
resampleWithDataset1.Tolerance = 2.220446049250313e-16
resampleWithDataset1.MarkBlankPointsAndCells = 0
resampleWithDataset1.SnapToCellWithClosestPoint = 0
resampleWithDataset1.CellLocator = 'Static Cell Locator'

UpdatePipeline(time=0.5, proxy=resampleWithDataset1)

# save data
# Exports every timestep of the resampled output into a single CSV
# (WriteTimeSteps=1 with WriteTimeStepsSeparately=0), so the writer pulls the
# whole time series through the pipeline in one call.
# NOTE(review): this is the call reported to kill a plain-Python kernel after
# ~2 minutes while succeeding (slowly) in the GUI. Likely mitigations to test:
# WriteTimeStepsSeparately=1 (one file per timestep, named via Filenamesuffix)
# to reduce peak memory, or running the script under pvbatch/pvpython instead
# of an external Python kernel — confirm before changing the trace.
SaveData(data_destination, proxy=resampleWithDataset1, WriteTimeSteps=1,
    WriteTimeStepsSeparately=0,
    Filenamesuffix='_%d',
    ChooseArraysToWrite=0,
    # vtkValidPointMask records which sample points actually hit the mesh
    PointDataArrays=['ActiveElements', 'Temperature', 'vtkValidPointMask'],
    CellDataArrays=[],
    FieldDataArrays=[],
    VertexDataArrays=[],
    EdgeDataArrays=[],
    RowDataArrays=[],
    Precision=5,
    UseScientificNotation=0,
    FieldAssociation='Point Data',
    AddMetaData=1,
    AddTimeStep=1,
    AddTime=1)