diff --git a/Makefile b/Makefile index 7b068b9f..5950ad04 100644 --- a/Makefile +++ b/Makefile @@ -7,7 +7,7 @@ env: ## Display information about the current environment. .PHONY: install-dev install-dev: ## Install all dependencies including dev and test dependencies, as well as pre-commit. - poetry install --with dev --with test --extras "tensorflow h5py" + poetry install --with dev --with test --extras "h5py" pre-commit install .PHONY: install @@ -20,7 +20,7 @@ install-prod: ## Install prod dependencies. .PHONY: install-test install-test: ## Install test dependencies. - poetry install --with test --extras "tensorflow h5py" + poetry install --with test --extras "h5py" .PHONY: clean clean: ## Uninstall modelscan diff --git a/modelscan/data/__init__.py b/modelscan/data/__init__.py new file mode 100644 index 00000000..34c38771 --- /dev/null +++ b/modelscan/data/__init__.py @@ -0,0 +1 @@ +"""Data files for modelscan.""" diff --git a/modelscan/data/tensorflow_operators.json b/modelscan/data/tensorflow_operators.json new file mode 100644 index 00000000..909ac003 --- /dev/null +++ b/modelscan/data/tensorflow_operators.json @@ -0,0 +1,1477 @@ +{ + "description": "List of known TensorFlow raw operators from tensorflow.raw_ops.__dict__.keys()", + "version": "2.20.0", + "operators": [ + "__name__", + "__doc__", + "__package__", + "__loader__", + "__spec__", + "__path__", + "__file__", + "__cached__", + "__builtins__", + "_sys", + "BatchMatrixBandPart", + "BatchMatrixDiag", + "BatchMatrixDiagPart", + "BatchMatrixSetDiag", + "BatchToSpace", + "BatchToSpaceND", + "Bitcast", + "BroadcastArgs", + "BroadcastGradientArgs", + "BroadcastTo", + "CheckNumerics", + "CheckNumericsV2", + "Concat", + "ConcatOffset", + "ConcatV2", + "ConjugateTranspose", + "Const", + "DebugGradientIdentity", + "DebugGradientRefIdentity", + "DeepCopy", + "DepthToSpace", + "Dequantize", + "Diag", + "DiagPart", + "EditDistance", + "Empty", + "EnsureShape", + "ExpandDims", + "ExtractImagePatches", + "ExtractVolumePatches", + "FakeQuantWithMinMaxArgs", + "FakeQuantWithMinMaxArgsGradient", + "FakeQuantWithMinMaxVars", + "FakeQuantWithMinMaxVarsGradient", + "FakeQuantWithMinMaxVarsPerChannel", + "FakeQuantWithMinMaxVarsPerChannelGradient", + "Fill", + "Fingerprint", + "Gather", + "GatherNd", + "GatherV2", + "GuaranteeConst", + "Identity", + "IdentityN", + "ImmutableConst", + "InplaceAdd", + "InplaceSub", + "InplaceUpdate", + "InvertPermutation", + "ListDiff", + "LowerBound", + "MatrixBandPart", + "MatrixDiag", + "MatrixDiagPart", + "MatrixDiagPartV2", + "MatrixDiagPartV3", + "MatrixDiagV2", + "MatrixDiagV3", + "MatrixSetDiag", + "MatrixSetDiagV2", + "MatrixSetDiagV3", + "MirrorPad", + "MirrorPadGrad", + "OneHot", + "OnesLike", + "Pack", + "Pad", + "PadV2", + "ParallelConcat", + "Placeholder", + "PlaceholderV2", + "PlaceholderWithDefault", + "PreventGradient", + "QuantizeAndDequantize", + "QuantizeAndDequantizeV2", + "QuantizeAndDequantizeV3", + "QuantizeAndDequantizeV4", + "QuantizeAndDequantizeV4Grad", + "QuantizeV2", + "QuantizedConcat", + "QuantizedInstanceNorm", + "QuantizedReshape", + "Rank", + "RefIdentity", + "Reshape", + "ResourceStridedSliceAssign", + "Reverse", + "ReverseSequence", + "ReverseV2", + "ScatterNd", + "ScatterNdNonAliasingAdd", + "Shape", + "ShapeN", + "Size", + "Slice", + "Snapshot", + "SpaceToBatch", + "SpaceToBatchND", + "SpaceToDepth", + "Split", + "SplitV", + "Squeeze", + "StopGradient", + "StridedSlice", + "StridedSliceAssign", + "StridedSliceGrad", + "TensorScatterAdd", + "TensorScatterMax", + 
"TensorScatterMin", + "TensorScatterSub", + "TensorScatterUpdate", + "TensorStridedSliceUpdate", + "Tile", + "TileGrad", + "Transpose", + "Unique", + "UniqueV2", + "UniqueWithCounts", + "UniqueWithCountsV2", + "Unpack", + "UnravelIndex", + "UpperBound", + "Where", + "ZerosLike", + "AudioSpectrogram", + "DecodeWav", + "EncodeWav", + "Mfcc", + "Batch", + "BatchFunction", + "Unbatch", + "UnbatchGrad", + "BitwiseAnd", + "BitwiseOr", + "BitwiseXor", + "Invert", + "LeftShift", + "PopulationCount", + "RightShift", + "BoostedTreesAggregateStats", + "BoostedTreesBucketize", + "BoostedTreesCalculateBestFeatureSplit", + "BoostedTreesCalculateBestFeatureSplitV2", + "BoostedTreesCalculateBestGainsPerFeature", + "BoostedTreesCenterBias", + "BoostedTreesCreateEnsemble", + "BoostedTreesCreateQuantileStreamResource", + "BoostedTreesDeserializeEnsemble", + "BoostedTreesEnsembleResourceHandleOp", + "BoostedTreesExampleDebugOutputs", + "BoostedTreesFlushQuantileSummaries", + "BoostedTreesGetEnsembleStates", + "BoostedTreesMakeQuantileSummaries", + "BoostedTreesMakeStatsSummary", + "BoostedTreesPredict", + "BoostedTreesQuantileStreamResourceAddSummaries", + "BoostedTreesQuantileStreamResourceDeserialize", + "BoostedTreesQuantileStreamResourceFlush", + "BoostedTreesQuantileStreamResourceGetBucketBoundaries", + "BoostedTreesQuantileStreamResourceHandleOp", + "BoostedTreesSerializeEnsemble", + "BoostedTreesSparseAggregateStats", + "BoostedTreesSparseCalculateBestFeatureSplit", + "BoostedTreesTrainingPredict", + "BoostedTreesUpdateEnsemble", + "BoostedTreesUpdateEnsembleV2", + "IsBoostedTreesEnsembleInitialized", + "IsBoostedTreesQuantileStreamResourceInitialized", + "AllCandidateSampler", + "ComputeAccidentalHits", + "FixedUnigramCandidateSampler", + "LearnedUnigramCandidateSampler", + "LogUniformCandidateSampler", + "ThreadUnsafeUnigramCandidateSampler", + "UniformCandidateSampler", + "GenerateVocabRemapping", + "LoadAndRemapMatrix", + "KMC2ChainInitialization", + "KmeansPlusPlusInitialization", + "NearestNeighbors", + "CollectiveAllToAllV2", + "CollectiveAllToAllV3", + "CollectiveAssignGroupV2", + "CollectiveBcastRecv", + "CollectiveBcastRecvV2", + "CollectiveBcastSend", + "CollectiveBcastSendV2", + "CollectiveGather", + "CollectiveGatherV2", + "CollectiveInitializeCommunicator", + "CollectiveReduce", + "CollectiveReduceScatterV2", + "CollectiveReduceV2", + "CollectiveReduceV3", + "CompositeTensorVariantFromComponents", + "CompositeTensorVariantToComponents", + "Abort", + "ControlTrigger", + "Enter", + "Exit", + "LoopCond", + "Merge", + "NextIteration", + "NoOp", + "RefEnter", + "RefExit", + "RefMerge", + "RefNextIteration", + "RefSelect", + "RefSwitch", + "Switch", + "DenseCountSparseOutput", + "RaggedCountSparseOutput", + "SparseCountSparseOutput", + "CTCBeamSearchDecoder", + "CTCGreedyDecoder", + "CTCLoss", + "CTCLossV2", + "CudnnRNN", + "CudnnRNNBackprop", + "CudnnRNNBackpropV2", + "CudnnRNNBackpropV3", + "CudnnRNNCanonicalToParams", + "CudnnRNNCanonicalToParamsV2", + "CudnnRNNParamsSize", + "CudnnRNNParamsToCanonical", + "CudnnRNNParamsToCanonicalV2", + "CudnnRNNV2", + "CudnnRNNV3", + "AccumulatorApplyGradient", + "AccumulatorNumAccumulated", + "AccumulatorSetGlobalStep", + "AccumulatorTakeGradient", + "Barrier", + "BarrierClose", + "BarrierIncompleteSize", + "BarrierInsertMany", + "BarrierReadySize", + "BarrierTakeMany", + "ConditionalAccumulator", + "DeleteSessionTensor", + "DynamicPartition", + "DynamicStitch", + "FIFOQueue", + "FIFOQueueV2", + "FakeQueue", + "GetSessionHandle", + "GetSessionHandleV2", 
+ "GetSessionTensor", + "MapClear", + "MapIncompleteSize", + "MapPeek", + "MapSize", + "MapStage", + "MapUnstage", + "MapUnstageNoKey", + "OrderedMapClear", + "OrderedMapIncompleteSize", + "OrderedMapPeek", + "OrderedMapSize", + "OrderedMapStage", + "OrderedMapUnstage", + "OrderedMapUnstageNoKey", + "PaddingFIFOQueue", + "PaddingFIFOQueueV2", + "ParallelDynamicStitch", + "PriorityQueue", + "PriorityQueueV2", + "QueueClose", + "QueueCloseV2", + "QueueDequeue", + "QueueDequeueMany", + "QueueDequeueManyV2", + "QueueDequeueUpTo", + "QueueDequeueUpToV2", + "QueueDequeueV2", + "QueueEnqueue", + "QueueEnqueueMany", + "QueueEnqueueManyV2", + "QueueEnqueueV2", + "QueueIsClosed", + "QueueIsClosedV2", + "QueueSize", + "QueueSizeV2", + "RandomShuffleQueue", + "RandomShuffleQueueV2", + "RecordInput", + "ResourceAccumulatorApplyGradient", + "ResourceAccumulatorNumAccumulated", + "ResourceAccumulatorSetGlobalStep", + "ResourceAccumulatorTakeGradient", + "ResourceConditionalAccumulator", + "SparseAccumulatorApplyGradient", + "SparseAccumulatorTakeGradient", + "SparseConditionalAccumulator", + "Stack", + "StackClose", + "StackCloseV2", + "StackPop", + "StackPopV2", + "StackPush", + "StackPushV2", + "StackV2", + "Stage", + "StageClear", + "StagePeek", + "StageSize", + "TensorArray", + "TensorArrayClose", + "TensorArrayCloseV2", + "TensorArrayCloseV3", + "TensorArrayConcat", + "TensorArrayConcatV2", + "TensorArrayConcatV3", + "TensorArrayGather", + "TensorArrayGatherV2", + "TensorArrayGatherV3", + "TensorArrayGrad", + "TensorArrayGradV2", + "TensorArrayGradV3", + "TensorArrayGradWithShape", + "TensorArrayPack", + "TensorArrayRead", + "TensorArrayReadV2", + "TensorArrayReadV3", + "TensorArrayScatter", + "TensorArrayScatterV2", + "TensorArrayScatterV3", + "TensorArraySize", + "TensorArraySizeV2", + "TensorArraySizeV3", + "TensorArraySplit", + "TensorArraySplitV2", + "TensorArraySplitV3", + "TensorArrayUnpack", + "TensorArrayV2", + "TensorArrayV3", + "TensorArrayWrite", + "TensorArrayWriteV2", + "TensorArrayWriteV3", + "Unstage", + "AnonymousIterator", + "AnonymousIteratorV2", + "AnonymousIteratorV3", + "AnonymousMemoryCache", + "AnonymousMultiDeviceIterator", + "AnonymousMultiDeviceIteratorV3", + "AnonymousRandomSeedGenerator", + "AnonymousSeedGenerator", + "BatchDataset", + "BatchDatasetV2", + "CacheDataset", + "CacheDatasetV2", + "ConcatenateDataset", + "DatasetCardinality", + "DatasetFingerprint", + "DatasetToGraph", + "DatasetToGraphV2", + "DatasetToSingleElement", + "DeleteIterator", + "DeleteMemoryCache", + "DeleteMultiDeviceIterator", + "DeleteRandomSeedGenerator", + "DeleteSeedGenerator", + "DeserializeIterator", + "DummyMemoryCache", + "DummySeedGenerator", + "FilterByLastComponentDataset", + "FilterDataset", + "FinalizeDataset", + "FixedLengthRecordDataset", + "FixedLengthRecordDatasetV2", + "FlatMapDataset", + "GeneratorDataset", + "GetOptions", + "InterleaveDataset", + "Iterator", + "IteratorFromStringHandle", + "IteratorFromStringHandleV2", + "IteratorGetNext", + "IteratorGetNextAsOptional", + "IteratorGetNextSync", + "IteratorToStringHandle", + "IteratorV2", + "MakeIterator", + "MapDataset", + "MapDefun", + "ModelDataset", + "MultiDeviceIterator", + "MultiDeviceIteratorFromStringHandle", + "MultiDeviceIteratorGetNextFromShard", + "MultiDeviceIteratorInit", + "MultiDeviceIteratorToStringHandle", + "OneShotIterator", + "OptimizeDataset", + "OptimizeDatasetV2", + "OptionsDataset", + "PaddedBatchDataset", + "PaddedBatchDatasetV2", + "ParallelBatchDataset", + "ParallelFilterDataset", + 
"ParallelInterleaveDatasetV2", + "ParallelInterleaveDatasetV3", + "ParallelInterleaveDatasetV4", + "ParallelMapDataset", + "ParallelMapDatasetV2", + "PrefetchDataset", + "RangeDataset", + "ReduceDataset", + "RepeatDataset", + "RewriteDataset", + "SerializeIterator", + "ShardDataset", + "ShuffleAndRepeatDataset", + "ShuffleAndRepeatDatasetV2", + "ShuffleDataset", + "ShuffleDatasetV2", + "ShuffleDatasetV3", + "SkipDataset", + "SparseTensorSliceDataset", + "TFRecordDataset", + "TFRecordDatasetV2", + "TakeDataset", + "TensorDataset", + "TensorSliceDataset", + "TextLineDataset", + "UnwrapDatasetVariant", + "WindowDataset", + "WindowOp", + "WrapDatasetVariant", + "ZipDataset", + "Copy", + "CopyHost", + "DebugIdentity", + "DebugIdentityV2", + "DebugIdentityV3", + "DebugNanCount", + "DebugNumericSummary", + "DebugNumericSummaryV2", + "DecodeProtoV2", + "EncodeProto", + "AssertCardinalityDataset", + "AssertNextDataset", + "AssertPrevDataset", + "AutoShardDataset", + "BytesProducedStatsDataset", + "CSVDataset", + "CSVDatasetV2", + "CheckPinned", + "ChooseFastestBranchDataset", + "ChooseFastestDataset", + "CompressElement", + "ComputeBatchSize", + "DataServiceDataset", + "DataServiceDatasetV2", + "DataServiceDatasetV3", + "DataServiceDatasetV4", + "DatasetFromGraph", + "DatasetToTFRecord", + "DenseToSparseBatchDataset", + "DirectedInterleaveDataset", + "DistributedSave", + "DummyIterationCounter", + "ExperimentalAssertNextDataset", + "ExperimentalAutoShardDataset", + "ExperimentalBytesProducedStatsDataset", + "ExperimentalCSVDataset", + "ExperimentalChooseFastestDataset", + "ExperimentalDatasetCardinality", + "ExperimentalDatasetToTFRecord", + "ExperimentalDenseToSparseBatchDataset", + "ExperimentalDirectedInterleaveDataset", + "ExperimentalGroupByReducerDataset", + "ExperimentalGroupByWindowDataset", + "ExperimentalIgnoreErrorsDataset", + "ExperimentalIteratorGetDevice", + "ExperimentalLMDBDataset", + "ExperimentalLatencyStatsDataset", + "ExperimentalMapAndBatchDataset", + "ExperimentalMapDataset", + "ExperimentalMatchingFilesDataset", + "ExperimentalMaxIntraOpParallelismDataset", + "ExperimentalNonSerializableDataset", + "ExperimentalParallelInterleaveDataset", + "ExperimentalParseExampleDataset", + "ExperimentalPrivateThreadPoolDataset", + "ExperimentalRandomDataset", + "ExperimentalRebatchDataset", + "ExperimentalScanDataset", + "ExperimentalSetStatsAggregatorDataset", + "ExperimentalSleepDataset", + "ExperimentalSlidingWindowDataset", + "ExperimentalSqlDataset", + "ExperimentalStatsAggregatorHandle", + "ExperimentalStatsAggregatorSummary", + "ExperimentalTakeWhileDataset", + "ExperimentalThreadPoolDataset", + "ExperimentalThreadPoolHandle", + "ExperimentalUnbatchDataset", + "ExperimentalUniqueDataset", + "GetElementAtIndex", + "GlobalShuffleDataset", + "GroupByReducerDataset", + "GroupByWindowDataset", + "IgnoreErrorsDataset", + "IndexFlatMapDataset", + "InitializeTableFromDataset", + "IteratorGetDevice", + "IteratorGetModelProto", + "LMDBDataset", + "LatencyStatsDataset", + "LegacyParallelInterleaveDatasetV2", + "ListDataset", + "ListSnapshotChunksDataset", + "LoadDataset", + "MapAndBatchDataset", + "MatchingFilesDataset", + "MaxIntraOpParallelismDataset", + "NonSerializableDataset", + "ParallelInterleaveDataset", + "ParseExampleDataset", + "ParseExampleDatasetV2", + "PrivateThreadPoolDataset", + "RandomDataset", + "RandomDatasetV2", + "RebatchDataset", + "RebatchDatasetV2", + "RegisterDataset", + "RegisterDatasetV2", + "SamplingDataset", + "SaveDataset", + "SaveDatasetV2", + "ScanDataset", + 
"SetStatsAggregatorDataset", + "SleepDataset", + "SlidingWindowDataset", + "SnapshotChunkDataset", + "SnapshotDataset", + "SnapshotDatasetReader", + "SnapshotDatasetV2", + "SnapshotNestedDatasetReader", + "SqlDataset", + "StatsAggregatorHandle", + "StatsAggregatorHandleV2", + "StatsAggregatorSetSummaryWriter", + "StatsAggregatorSummary", + "TakeWhileDataset", + "ThreadPoolDataset", + "ThreadPoolHandle", + "UnbatchDataset", + "UncompressElement", + "UniqueDataset", + "WeightedFlatMapDataset", + "FileSystemSetConfiguration", + "Case", + "DeviceIndex", + "FakeParam", + "For", + "If", + "PartitionedCall", + "RemoteCall", + "StatefulPartitionedCall", + "StatelessCase", + "StatelessIf", + "StatelessWhile", + "SymbolicGradient", + "ToBool", + "While", + "AdjustContrast", + "AdjustContrastv2", + "AdjustHue", + "AdjustSaturation", + "CombinedNonMaxSuppression", + "CropAndResize", + "CropAndResizeGradBoxes", + "CropAndResizeGradImage", + "DecodeAndCropJpeg", + "DecodeBmp", + "DecodeGif", + "DecodeImage", + "DecodeJpeg", + "DecodePng", + "DecodeWebP", + "DrawBoundingBoxes", + "DrawBoundingBoxesV2", + "EncodeJpeg", + "EncodeJpegVariableQuality", + "EncodePng", + "ExtractGlimpse", + "ExtractGlimpseV2", + "ExtractJpegShape", + "GenerateBoundingBoxProposals", + "HSVToRGB", + "ImageProjectiveTransformV2", + "ImageProjectiveTransformV3", + "NonMaxSuppression", + "NonMaxSuppressionV2", + "NonMaxSuppressionV3", + "NonMaxSuppressionV4", + "NonMaxSuppressionV5", + "NonMaxSuppressionWithOverlaps", + "QuantizedResizeBilinear", + "RGBToHSV", + "RandomCrop", + "ResizeArea", + "ResizeBicubic", + "ResizeBicubicGrad", + "ResizeBilinear", + "ResizeBilinearGrad", + "ResizeNearestNeighbor", + "ResizeNearestNeighborGrad", + "SampleDistortedBoundingBox", + "SampleDistortedBoundingBoxV2", + "ScaleAndTranslate", + "ScaleAndTranslateGrad", + "StatelessSampleDistortedBoundingBox", + "FixedLengthRecordReader", + "FixedLengthRecordReaderV2", + "IdentityReader", + "IdentityReaderV2", + "LMDBReader", + "MatchingFiles", + "MergeV2Checkpoints", + "ReadFile", + "ReaderNumRecordsProduced", + "ReaderNumRecordsProducedV2", + "ReaderNumWorkUnitsCompleted", + "ReaderNumWorkUnitsCompletedV2", + "ReaderRead", + "ReaderReadUpTo", + "ReaderReadUpToV2", + "ReaderReadV2", + "ReaderReset", + "ReaderResetV2", + "ReaderRestoreState", + "ReaderRestoreStateV2", + "ReaderSerializeState", + "ReaderSerializeStateV2", + "Restore", + "RestoreSlice", + "RestoreV2", + "Save", + "SaveSlices", + "SaveV2", + "ShardedFilename", + "ShardedFilespec", + "TFRecordReader", + "TFRecordReaderV2", + "TextLineReader", + "TextLineReaderV2", + "WholeFileReader", + "WholeFileReaderV2", + "WriteFile", + "BandedTriangularSolve", + "BatchCholesky", + "BatchCholeskyGrad", + "BatchMatrixDeterminant", + "BatchMatrixInverse", + "BatchMatrixSolve", + "BatchMatrixSolveLs", + "BatchMatrixTriangularSolve", + "BatchSelfAdjointEig", + "BatchSelfAdjointEigV2", + "BatchSvd", + "Cholesky", + "CholeskyGrad", + "Eig", + "Einsum", + "LogMatrixDeterminant", + "Lu", + "MatrixDeterminant", + "MatrixExponential", + "MatrixInverse", + "MatrixLogarithm", + "MatrixSolve", + "MatrixSolveLs", + "MatrixSquareRoot", + "MatrixTriangularSolve", + "Qr", + "SelfAdjointEig", + "SelfAdjointEigV2", + "Svd", + "TridiagonalMatMul", + "TridiagonalSolve", + "EmptyTensorList", + "TensorListConcat", + "TensorListConcatLists", + "TensorListConcatV2", + "TensorListElementShape", + "TensorListFromTensor", + "TensorListGather", + "TensorListGetItem", + "TensorListLength", + "TensorListPopBack", + 
"TensorListPushBack", + "TensorListPushBackBatch", + "TensorListReserve", + "TensorListResize", + "TensorListScatter", + "TensorListScatterIntoExistingList", + "TensorListScatterV2", + "TensorListSetItem", + "TensorListSplit", + "TensorListStack", + "Assert", + "AudioSummary", + "AudioSummaryV2", + "HistogramSummary", + "ImageSummary", + "MergeSummary", + "Print", + "PrintV2", + "ScalarSummary", + "TensorSummary", + "TensorSummaryV2", + "Timestamp", + "AnonymousHashTable", + "AnonymousMutableDenseHashTable", + "AnonymousMutableHashTable", + "AnonymousMutableHashTableOfTensors", + "HashTable", + "HashTableV2", + "InitializeTable", + "InitializeTableFromTextFile", + "InitializeTableFromTextFileV2", + "InitializeTableV2", + "LookupTableExport", + "LookupTableExportV2", + "LookupTableFind", + "LookupTableFindV2", + "LookupTableImport", + "LookupTableImportV2", + "LookupTableInsert", + "LookupTableInsertV2", + "LookupTableRemoveV2", + "LookupTableSize", + "LookupTableSizeV2", + "MutableDenseHashTable", + "MutableDenseHashTableV2", + "MutableHashTable", + "MutableHashTableOfTensors", + "MutableHashTableOfTensorsV2", + "MutableHashTableV2", + "Roll", + "EmptyTensorMap", + "TensorMapErase", + "TensorMapHasKey", + "TensorMapInsert", + "TensorMapLookup", + "TensorMapSize", + "TensorMapStackKeys", + "Abs", + "AccumulateNV2", + "Acos", + "Acosh", + "Add", + "AddN", + "AddV2", + "All", + "Angle", + "Any", + "ApproximateEqual", + "ArgMax", + "ArgMin", + "Asin", + "Asinh", + "Atan", + "Atan2", + "Atanh", + "BatchMatMul", + "BatchMatMulV2", + "BatchMatMulV3", + "Betainc", + "Bincount", + "Bucketize", + "Cast", + "Ceil", + "ClipByValue", + "Complex", + "ComplexAbs", + "Conj", + "Cos", + "Cosh", + "Cross", + "Cumprod", + "Cumsum", + "CumulativeLogsumexp", + "DenseBincount", + "Digamma", + "Div", + "DivNoNan", + "Equal", + "Erf", + "Erfc", + "Erfinv", + "EuclideanNorm", + "Exp", + "Expm1", + "Floor", + "FloorDiv", + "FloorMod", + "Greater", + "GreaterEqual", + "HistogramFixedWidth", + "Igamma", + "IgammaGradA", + "Igammac", + "Imag", + "Inv", + "InvGrad", + "IsFinite", + "IsInf", + "IsNan", + "Less", + "LessEqual", + "Lgamma", + "LinSpace", + "Log", + "Log1p", + "LogicalAnd", + "LogicalNot", + "LogicalOr", + "MatMul", + "Max", + "Maximum", + "Mean", + "Min", + "Minimum", + "Mod", + "Mul", + "MulNoNan", + "Ndtri", + "Neg", + "NextAfter", + "NotEqual", + "Polygamma", + "Pow", + "Prod", + "QuantizeDownAndShrinkRange", + "QuantizedAdd", + "QuantizedMatMul", + "QuantizedMul", + "RaggedBincount", + "Range", + "Real", + "RealDiv", + "Reciprocal", + "ReciprocalGrad", + "RequantizationRange", + "RequantizationRangePerChannel", + "Requantize", + "RequantizePerChannel", + "Rint", + "Round", + "Rsqrt", + "RsqrtGrad", + "SegmentMax", + "SegmentMaxV2", + "SegmentMean", + "SegmentMin", + "SegmentMinV2", + "SegmentProd", + "SegmentProdV2", + "SegmentSum", + "SegmentSumV2", + "Select", + "SelectV2", + "Sigmoid", + "SigmoidGrad", + "Sign", + "Sin", + "Sinh", + "SobolSample", + "SparseBincount", + "SparseMatMul", + "SparseSegmentMean", + "SparseSegmentMeanGrad", + "SparseSegmentMeanGradV2", + "SparseSegmentMeanWithNumSegments", + "SparseSegmentSqrtN", + "SparseSegmentSqrtNGrad", + "SparseSegmentSqrtNGradV2", + "SparseSegmentSqrtNWithNumSegments", + "SparseSegmentSum", + "SparseSegmentSumGrad", + "SparseSegmentSumGradV2", + "SparseSegmentSumWithNumSegments", + "Sqrt", + "SqrtGrad", + "Square", + "SquaredDifference", + "Sub", + "Sum", + "Tan", + "Tanh", + "TanhGrad", + "TruncateDiv", + "TruncateMod", + "UnsortedSegmentMax", + 
"UnsortedSegmentMin", + "UnsortedSegmentProd", + "UnsortedSegmentSum", + "Xdivy", + "Xlog1py", + "Xlogy", + "Zeta", + "NcclAllReduce", + "NcclBroadcast", + "NcclReduce", + "ApproxTopK", + "AvgPool", + "AvgPool3D", + "AvgPool3DGrad", + "AvgPoolGrad", + "BatchNormWithGlobalNormalization", + "BatchNormWithGlobalNormalizationGrad", + "BiasAdd", + "BiasAddGrad", + "BiasAddV1", + "Conv", + "Conv2D", + "Conv2DBackpropFilter", + "Conv2DBackpropFilterV2", + "Conv2DBackpropInput", + "Conv2DBackpropInputV2", + "Conv3D", + "Conv3DBackpropFilter", + "Conv3DBackpropFilterV2", + "Conv3DBackpropInput", + "Conv3DBackpropInputV2", + "DataFormatDimMap", + "DataFormatVecPermute", + "DepthwiseConv2dNative", + "DepthwiseConv2dNativeBackpropFilter", + "DepthwiseConv2dNativeBackpropInput", + "Dilation2D", + "Dilation2DBackpropFilter", + "Dilation2DBackpropInput", + "Elu", + "EluGrad", + "FractionalAvgPool", + "FractionalAvgPoolGrad", + "FractionalMaxPool", + "FractionalMaxPoolGrad", + "FusedBatchNorm", + "FusedBatchNormGrad", + "FusedBatchNormGradV2", + "FusedBatchNormGradV3", + "FusedBatchNormV2", + "FusedBatchNormV3", + "FusedPadConv2D", + "FusedResizeAndPadConv2D", + "InTopK", + "InTopKV2", + "IsotonicRegression", + "L2Loss", + "LRN", + "LRNGrad", + "LeakyRelu", + "LeakyReluGrad", + "LogSoftmax", + "MaxPool", + "MaxPool3D", + "MaxPool3DGrad", + "MaxPool3DGradGrad", + "MaxPoolGrad", + "MaxPoolGradGrad", + "MaxPoolGradGradV2", + "MaxPoolGradGradWithArgmax", + "MaxPoolGradV2", + "MaxPoolGradWithArgmax", + "MaxPoolV2", + "MaxPoolWithArgmax", + "NthElement", + "QuantizedAvgPool", + "QuantizedBatchNormWithGlobalNormalization", + "QuantizedBiasAdd", + "QuantizedConv2D", + "QuantizedConv2DAndRelu", + "QuantizedConv2DAndReluAndRequantize", + "QuantizedConv2DAndRequantize", + "QuantizedConv2DPerChannel", + "QuantizedConv2DWithBias", + "QuantizedConv2DWithBiasAndRelu", + "QuantizedConv2DWithBiasAndReluAndRequantize", + "QuantizedConv2DWithBiasAndRequantize", + "QuantizedConv2DWithBiasSignedSumAndReluAndRequantize", + "QuantizedConv2DWithBiasSumAndRelu", + "QuantizedConv2DWithBiasSumAndReluAndRequantize", + "QuantizedDepthwiseConv2D", + "QuantizedDepthwiseConv2DWithBias", + "QuantizedDepthwiseConv2DWithBiasAndRelu", + "QuantizedDepthwiseConv2DWithBiasAndReluAndRequantize", + "QuantizedMatMulWithBias", + "QuantizedMatMulWithBiasAndDequantize", + "QuantizedMatMulWithBiasAndRelu", + "QuantizedMatMulWithBiasAndReluAndRequantize", + "QuantizedMatMulWithBiasAndRequantize", + "QuantizedMaxPool", + "QuantizedRelu", + "QuantizedRelu6", + "QuantizedReluX", + "Relu", + "Relu6", + "Relu6Grad", + "ReluGrad", + "Selu", + "SeluGrad", + "Softmax", + "SoftmaxCrossEntropyWithLogits", + "Softplus", + "SoftplusGrad", + "Softsign", + "SoftsignGrad", + "SparseSoftmaxCrossEntropyWithLogits", + "TopK", + "TopKV2", + "OptionalFromValue", + "OptionalGetValue", + "OptionalHasValue", + "OptionalNone", + "DecodeCSV", + "DecodeCompressed", + "DecodeJSONExample", + "DecodePaddedRaw", + "DecodeRaw", + "ParseExample", + "ParseExampleV2", + "ParseSequenceExample", + "ParseSequenceExampleV2", + "ParseSingleExample", + "ParseSingleSequenceExample", + "ParseTensor", + "SerializeTensor", + "StringToNumber", + "RaggedCross", + "RaggedFillEmptyRows", + "RaggedFillEmptyRowsGrad", + "RaggedGather", + "RaggedTensorFromVariant", + "RaggedTensorToSparse", + "RaggedTensorToTensor", + "RaggedTensorToVariant", + "RaggedTensorToVariantGradient", + "RaggedRange", + "RandomIndexShuffle", + "Multinomial", + "ParameterizedTruncatedNormal", + "RandomGamma", + 
"RandomGammaGrad", + "RandomPoisson", + "RandomPoissonV2", + "RandomShuffle", + "RandomStandardNormal", + "RandomUniform", + "RandomUniformInt", + "TruncatedNormal", + "AssignAddVariableOp", + "AssignSubVariableOp", + "AssignVariableOp", + "ConsumeMutexLock", + "DestroyResourceOp", + "DisableCopyOnRead", + "MutexLock", + "MutexV2", + "ReadVariableOp", + "ResourceGather", + "ResourceGatherNd", + "ResourceScatterAdd", + "ResourceScatterDiv", + "ResourceScatterMax", + "ResourceScatterMin", + "ResourceScatterMul", + "ResourceScatterSub", + "ResourceScatterUpdate", + "VarHandleOp", + "VarIsInitializedOp", + "VariableShape", + "BlockLSTM", + "BlockLSTMGrad", + "BlockLSTMGradV2", + "BlockLSTMV2", + "GRUBlockCell", + "GRUBlockCellGrad", + "LSTMBlockCell", + "LSTMBlockCellGrad", + "EagerPyFunc", + "PyFunc", + "PyFuncStateless", + "SdcaFprint", + "SdcaOptimizer", + "SdcaOptimizerV2", + "SdcaShrinkL1", + "Recv", + "Send", + "DenseToDenseSetOperation", + "DenseToSparseSetOperation", + "SetSize", + "SparseToSparseSetOperation", + "AddManySparseToTensorsMap", + "AddSparseToTensorsMap", + "DeserializeManySparse", + "DeserializeSparse", + "SerializeManySparse", + "SerializeSparse", + "SparseAdd", + "SparseAddGrad", + "SparseConcat", + "SparseCross", + "SparseCrossHashed", + "SparseCrossV2", + "SparseDenseCwiseAdd", + "SparseDenseCwiseDiv", + "SparseDenseCwiseMul", + "SparseFillEmptyRows", + "SparseFillEmptyRowsGrad", + "SparseReduceMax", + "SparseReduceMaxSparse", + "SparseReduceSum", + "SparseReduceSumSparse", + "SparseReorder", + "SparseReshape", + "SparseSlice", + "SparseSliceGrad", + "SparseSoftmax", + "SparseSparseMaximum", + "SparseSparseMinimum", + "SparseSplit", + "SparseTensorDenseAdd", + "SparseTensorDenseMatMul", + "SparseToDense", + "TakeManySparseFromTensorsMap", + "BesselI0", + "BesselI0e", + "BesselI1", + "BesselI1e", + "BesselJ0", + "BesselJ1", + "BesselK0", + "BesselK0e", + "BesselK1", + "BesselK1e", + "BesselY0", + "BesselY1", + "Dawsn", + "Expint", + "FresnelCos", + "FresnelSin", + "Spence", + "BatchFFT", + "BatchFFT2D", + "BatchFFT3D", + "BatchIFFT", + "BatchIFFT2D", + "BatchIFFT3D", + "FFT", + "FFT2D", + "FFT3D", + "FFTND", + "IFFT", + "IFFT2D", + "IFFT3D", + "IFFTND", + "IRFFT", + "IRFFT2D", + "IRFFT3D", + "IRFFTND", + "RFFT", + "RFFT2D", + "RFFT3D", + "RFFTND", + "Assign", + "AssignAdd", + "AssignSub", + "CountUpTo", + "DestroyTemporaryVariable", + "IsVariableInitialized", + "ResourceCountUpTo", + "ResourceScatterNdAdd", + "ResourceScatterNdMax", + "ResourceScatterNdMin", + "ResourceScatterNdSub", + "ResourceScatterNdUpdate", + "ScatterAdd", + "ScatterDiv", + "ScatterMax", + "ScatterMin", + "ScatterMul", + "ScatterNdAdd", + "ScatterNdMax", + "ScatterNdMin", + "ScatterNdSub", + "ScatterNdUpdate", + "ScatterSub", + "ScatterUpdate", + "TemporaryVariable", + "Variable", + "VariableV2", + "NonDeterministicInts", + "RngReadAndSkip", + "RngSkip", + "StatefulRandomBinomial", + "StatefulStandardNormal", + "StatefulStandardNormalV2", + "StatefulTruncatedNormal", + "StatefulUniform", + "StatefulUniformFullInt", + "StatefulUniformInt", + "StatelessMultinomial", + "StatelessParameterizedTruncatedNormal", + "StatelessRandomBinomial", + "StatelessRandomGammaV2", + "StatelessRandomNormal", + "StatelessRandomPoisson", + "StatelessRandomUniform", + "StatelessRandomUniformFullInt", + "StatelessRandomUniformInt", + "StatelessTruncatedNormal", + "StatelessRandomGammaV3", + "StatelessRandomGetAlg", + "StatelessRandomGetKeyCounter", + "StatelessRandomGetKeyCounterAlg", + "StatelessRandomNormalV2", + 
"StatelessRandomUniformFullIntV2", + "StatelessRandomUniformIntV2", + "StatelessRandomUniformV2", + "StatelessShuffle", + "StatelessTruncatedNormalV2", + "AsString", + "DecodeBase64", + "EncodeBase64", + "ReduceJoin", + "RegexFullMatch", + "RegexReplace", + "StaticRegexFullMatch", + "StaticRegexReplace", + "StringFormat", + "StringJoin", + "StringLength", + "StringLower", + "StringNGrams", + "StringSplit", + "StringSplitV2", + "StringStrip", + "StringToHashBucket", + "StringToHashBucketFast", + "StringToHashBucketStrong", + "StringUpper", + "Substr", + "UnicodeDecode", + "UnicodeDecodeWithOffsets", + "UnicodeEncode", + "UnicodeScript", + "UnicodeTranscode", + "UnsortedSegmentJoin", + "CloseSummaryWriter", + "CreateSummaryDbWriter", + "CreateSummaryFileWriter", + "FlushSummaryWriter", + "ImportEvent", + "SummaryWriter", + "WriteAudioSummary", + "WriteGraphSummary", + "WriteHistogramSummary", + "WriteImageSummary", + "WriteRawProtoSummary", + "WriteScalarSummary", + "WriteSummary", + "SyncDevice", + "AllToAll", + "AssignVariableXlaConcatND", + "CollectivePermute", + "ConfigureDistributedTPU", + "ConfigureTPUEmbedding", + "CrossReplicaSum", + "DynamicEnqueueTPUEmbeddingArbitraryTensorBatch", + "DynamicEnqueueTPUEmbeddingRaggedTensorBatch", + "EnqueueTPUEmbeddingArbitraryTensorBatch", + "EnqueueTPUEmbeddingIntegerBatch", + "EnqueueTPUEmbeddingRaggedTensorBatch", + "EnqueueTPUEmbeddingSparseBatch", + "EnqueueTPUEmbeddingSparseTensorBatch", + "InfeedDequeue", + "InfeedDequeueTuple", + "InfeedEnqueue", + "InfeedEnqueuePrelinearizedBuffer", + "InfeedEnqueueTuple", + "IsTPUEmbeddingInitialized", + "LoadTPUEmbeddingADAMParameters", + "LoadTPUEmbeddingAdadeltaParameters", + "LoadTPUEmbeddingAdagradMomentumParameters", + "LoadTPUEmbeddingAdagradParameters", + "LoadTPUEmbeddingCenteredRMSPropParameters", + "LoadTPUEmbeddingFTRLParameters", + "LoadTPUEmbeddingFrequencyEstimatorParameters", + "LoadTPUEmbeddingMDLAdagradLightParameters", + "LoadTPUEmbeddingMomentumParameters", + "LoadTPUEmbeddingProximalAdagradParameters", + "LoadTPUEmbeddingProximalYogiParameters", + "LoadTPUEmbeddingRMSPropParameters", + "LoadTPUEmbeddingStochasticGradientDescentParameters", + "OutfeedDequeue", + "OutfeedDequeueTuple", + "OutfeedDequeueTupleV2", + "OutfeedDequeueV2", + "OutfeedEnqueue", + "OutfeedEnqueueTuple", + "Prelinearize", + "PrelinearizeTuple", + "ReadVariableXlaSplitND", + "RecvTPUEmbeddingActivations", + "RetrieveTPUEmbeddingADAMParameters", + "RetrieveTPUEmbeddingAdadeltaParameters", + "RetrieveTPUEmbeddingAdagradMomentumParameters", + "RetrieveTPUEmbeddingAdagradParameters", + "RetrieveTPUEmbeddingCenteredRMSPropParameters", + "RetrieveTPUEmbeddingFTRLParameters", + "RetrieveTPUEmbeddingFrequencyEstimatorParameters", + "RetrieveTPUEmbeddingMDLAdagradLightParameters", + "RetrieveTPUEmbeddingMomentumParameters", + "RetrieveTPUEmbeddingProximalAdagradParameters", + "RetrieveTPUEmbeddingProximalYogiParameters", + "RetrieveTPUEmbeddingRMSPropParameters", + "RetrieveTPUEmbeddingStochasticGradientDescentParameters", + "SendTPUEmbeddingGradients", + "ShutdownDistributedTPU", + "TPUCompilationResult", + "TPUDummyInput", + "TPUEmbeddingActivations", + "TPUOrdinalSelector", + "TPUPartitionedCall", + "TPUReplicateMetadata", + "TPUReplicatedInput", + "TPUReplicatedOutput", + "WorkerHeartbeat", + "XlaConcatND", + "XlaSplitND", + "TPUPartitionedInput", + "TPUPartitionedInputV2", + "TPUPartitionedOutput", + "TPUPartitionedOutputV2", + "ApplyAdaMax", + "ApplyAdadelta", + "ApplyAdagrad", + "ApplyAdagradDA", + "ApplyAdagradV2", 
+ "ApplyAdam", + "ApplyAddSign", + "ApplyCenteredRMSProp", + "ApplyFtrl", + "ApplyFtrlV2", + "ApplyGradientDescent", + "ApplyMomentum", + "ApplyPowerSign", + "ApplyProximalAdagrad", + "ApplyProximalGradientDescent", + "ApplyRMSProp", + "ResourceApplyAdaMax", + "ResourceApplyAdadelta", + "ResourceApplyAdagrad", + "ResourceApplyAdagradDA", + "ResourceApplyAdagradV2", + "ResourceApplyAdam", + "ResourceApplyAdamWithAmsgrad", + "ResourceApplyAddSign", + "ResourceApplyCenteredRMSProp", + "ResourceApplyFtrl", + "ResourceApplyFtrlV2", + "ResourceApplyGradientDescent", + "ResourceApplyKerasMomentum", + "ResourceApplyMomentum", + "ResourceApplyPowerSign", + "ResourceApplyProximalAdagrad", + "ResourceApplyProximalGradientDescent", + "ResourceApplyRMSProp", + "ResourceSparseApplyAdadelta", + "ResourceSparseApplyAdagrad", + "ResourceSparseApplyAdagradDA", + "ResourceSparseApplyAdagradV2", + "ResourceSparseApplyCenteredRMSProp", + "ResourceSparseApplyFtrl", + "ResourceSparseApplyFtrlV2", + "ResourceSparseApplyKerasMomentum", + "ResourceSparseApplyMomentum", + "ResourceSparseApplyProximalAdagrad", + "ResourceSparseApplyProximalGradientDescent", + "ResourceSparseApplyRMSProp", + "SparseApplyAdadelta", + "SparseApplyAdagrad", + "SparseApplyAdagradDA", + "SparseApplyAdagradV2", + "SparseApplyCenteredRMSProp", + "SparseApplyFtrl", + "SparseApplyFtrlV2", + "SparseApplyMomentum", + "SparseApplyProximalAdagrad", + "SparseApplyProximalGradientDescent", + "SparseApplyRMSProp", + "UniformDequantize", + "UniformQuantize", + "UniformQuantizedAdd", + "UniformQuantizedClipByValue", + "UniformQuantizedConvolution", + "UniformQuantizedConvolutionHybrid", + "UniformQuantizedDot", + "UniformQuantizedDotHybrid", + "UniformRequantize", + "CSRSparseMatrixComponents", + "CSRSparseMatrixToDense", + "CSRSparseMatrixToSparseTensor", + "DenseToCSRSparseMatrix", + "SparseMatrixAdd", + "SparseMatrixMatMul", + "SparseMatrixMul", + "SparseMatrixNNZ", + "SparseMatrixOrderingAMD", + "SparseMatrixSoftmax", + "SparseMatrixSoftmaxGrad", + "SparseMatrixSparseCholesky", + "SparseMatrixSparseMatMul", + "SparseMatrixTranspose", + "SparseMatrixZeros", + "SparseTensorToCSRSparseMatrix", + "ConvertToCooTensor", + "ConvertToListOfSparseCoreCooTensors", + "ConvertToSparseCoreCsrWrappedCooTensor", + "GetMinibatchSplitsWithPhysicalReplica", + "GetMinibatchesInCsrWithPhysicalReplica", + "GetStatsFromListOfSparseCoreCooTensors", + "GlobalIterId", + "SortListOfSparseCoreCooTensors", + "StoreMinibatchStatisticsInFdo", + "TPUAnnotateTensorsWithDynamicShape", + "TPUCopyWithDynamicShape", + "XlaSparseCoreAdagrad", + "XlaSparseCoreAdagradMomentum", + "XlaSparseCoreAdam", + "XlaSparseCoreFtrl", + "XlaSparseCoreSgd", + "XlaSparseDenseMatmul", + "XlaSparseDenseMatmulCustomCombinerOnTcGradWithAdagradAndCsrInput", + "XlaSparseDenseMatmulCustomCombinerOnTcGradWithAdagradMomentumAndCsrInput", + "XlaSparseDenseMatmulCustomCombinerOnTcGradWithAdamAndCsrInput", + "XlaSparseDenseMatmulCustomCombinerOnTcGradWithCsrInput", + "XlaSparseDenseMatmulCustomCombinerOnTcGradWithFtrlAndCsrInput", + "XlaSparseDenseMatmulCustomCombinerOnTcGradWithSgdAndCsrInput", + "XlaSparseDenseMatmulCustomCombinerOnTcWithCsrInput", + "XlaSparseDenseMatmulGradWithAdagradAndCsrInput", + "XlaSparseDenseMatmulGradWithAdagradAndStaticBufferSize", + "XlaSparseDenseMatmulGradWithAdagradMomentumAndCsrInput", + "XlaSparseDenseMatmulGradWithAdagradMomentumAndStaticBufferSize", + "XlaSparseDenseMatmulGradWithAdamAndCsrInput", + "XlaSparseDenseMatmulGradWithAdamAndStaticBufferSize", + 
"XlaSparseDenseMatmulGradWithCsrInput", + "XlaSparseDenseMatmulGradWithFtrlAndCsrInput", + "XlaSparseDenseMatmulGradWithFtrlAndStaticBufferSize", + "XlaSparseDenseMatmulGradWithSgdAndCsrInput", + "XlaSparseDenseMatmulGradWithSgdAndStaticBufferSize", + "XlaSparseDenseMatmulWithCsrInput", + "XlaSparseDenseMatmulWithStaticBufferSize", + "Fact" + ] +} \ No newline at end of file diff --git a/modelscan/scanners/keras/scan.py b/modelscan/scanners/keras/scan.py index 1e88c389..48c731e9 100644 --- a/modelscan/scanners/keras/scan.py +++ b/modelscan/scanners/keras/scan.py @@ -22,20 +22,6 @@ def scan(self, model: Model) -> Optional[ScanResults]: ]: return None - dep_error = self.handle_binary_dependencies() - if dep_error: - return ScanResults( - [], - [ - DependencyError( - self.name(), - f"To use {self.full_name()}, please install modelscan with tensorflow extras. `pip install 'modelscan[ tensorflow ]'` if you are using pip.", - model, - ) - ], - [], - ) - try: with zipfile.ZipFile(model.get_stream(), "r") as zip: file_names = zip.namelist() diff --git a/modelscan/scanners/saved_model/scan.py b/modelscan/scanners/saved_model/scan.py index 4c8f6f6b..5b4e42ac 100644 --- a/modelscan/scanners/saved_model/scan.py +++ b/modelscan/scanners/saved_model/scan.py @@ -5,14 +5,14 @@ from typing import List, Set, Optional, Dict, Any +# Use vendored protobuf files, but prefer tensorflow imports if available try: - import tensorflow from tensorflow.core.protobuf.saved_model_pb2 import SavedModel from tensorflow.python.keras.protobuf.saved_metadata_pb2 import SavedMetadata - - tensorflow_installed = True except ImportError: - tensorflow_installed = False + # Fallback to vendored protobuf files + from modelscan.vendored.saved_model_pb2 import SavedModel # type: ignore[attr-defined] + from modelscan.vendored.saved_metadata_pb2 import SavedMetadata # type: ignore[attr-defined] from modelscan.error import ( @@ -37,20 +37,6 @@ def scan( ]: return None - dep_error = self.handle_binary_dependencies() - if dep_error: - return ScanResults( - [], - [ - DependencyError( - self.name(), - f"To use {self.full_name()}, please install modelscan with tensorflow extras. `pip install 'modelscan[ tensorflow ]'` if you are using pip.", - model, - ) - ], - [], - ) - results = self._scan(model) return self.label_results(results) if results else None @@ -58,6 +44,31 @@ def scan( def _scan(self, model: Model) -> Optional[ScanResults]: raise NotImplementedError + @staticmethod + def _load_safe_operators() -> List[str]: + """Load the static list of safe TensorFlow operators from JSON file.""" + import os + import json + + # Get the path to the data directory + data_dir = os.path.join(os.path.dirname(__file__), "..", "..", "data") + operators_file = os.path.join(data_dir, "tensorflow_operators.json") + + try: + with open(operators_file, "r") as f: + data = json.load(f) + return [ + operator + for operator in list(data.get("operators", [])) + if operator[0] != "_" + ] + + except (FileNotFoundError, json.JSONDecodeError) as e: + logger.warning( + f"Could not load safe operators list: {e}. Using empty list." 
+ ) + return [] + # This function checks for malicious operators in both Keras and Tensorflow @staticmethod def _check_for_unsafe_tf_keras_operator( @@ -67,12 +78,8 @@ def _check_for_unsafe_tf_keras_operator( unsafe_operators: Dict[str, Any], ) -> ScanResults: issues: List[Issue] = [] - all_operators = ( - tensorflow.raw_ops.__dict__.keys() if tensorflow_installed else [] - ) - all_safe_operators = [ - operator for operator in list(all_operators) if operator[0] != "_" - ] + # Load static list of safe TensorFlow operators + all_safe_operators = SavedModelScan._load_safe_operators() for op in raw_operator: if op in unsafe_operators: @@ -96,13 +103,6 @@ def _check_for_unsafe_tf_keras_operator( ) return ScanResults(issues, [], []) - def handle_binary_dependencies( - self, settings: Optional[Dict[str, Any]] = None - ) -> Optional[str]: - if not tensorflow_installed: - return DependencyError.name() - return None - @staticmethod def name() -> str: return "saved_model" diff --git a/modelscan/vendored/__init__.py b/modelscan/vendored/__init__.py new file mode 100644 index 00000000..730c8bd4 --- /dev/null +++ b/modelscan/vendored/__init__.py @@ -0,0 +1 @@ +"""Vendored third-party code for modelscan.""" diff --git a/modelscan/vendored/attr_value_pb2.py b/modelscan/vendored/attr_value_pb2.py new file mode 100644 index 00000000..824c4313 --- /dev/null +++ b/modelscan/vendored/attr_value_pb2.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: tensorflow/core/framework/attr_value.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database + +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from modelscan.vendored import ( + tensor_pb2 as tensorflow_dot_core_dot_framework_dot_tensor__pb2, +) +from modelscan.vendored import ( + tensor_shape_pb2 as tensorflow_dot_core_dot_framework_dot_tensor__shape__pb2, +) +from modelscan.vendored import ( + types_pb2 as tensorflow_dot_core_dot_framework_dot_types__pb2, +) + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( + b'\n*tensorflow/core/framework/attr_value.proto\x12\ntensorflow\x1a&tensorflow/core/framework/tensor.proto\x1a,tensorflow/core/framework/tensor_shape.proto\x1a%tensorflow/core/framework/types.proto"\xa6\x04\n\tAttrValue\x12\x0b\n\x01s\x18\x02 \x01(\x0cH\x00\x12\x0b\n\x01i\x18\x03 \x01(\x03H\x00\x12\x0b\n\x01\x66\x18\x04 \x01(\x02H\x00\x12\x0b\n\x01\x62\x18\x05 \x01(\x08H\x00\x12$\n\x04type\x18\x06 \x01(\x0e\x32\x14.tensorflow.DataTypeH\x00\x12-\n\x05shape\x18\x07 \x01(\x0b\x32\x1c.tensorflow.TensorShapeProtoH\x00\x12)\n\x06tensor\x18\x08 \x01(\x0b\x32\x17.tensorflow.TensorProtoH\x00\x12/\n\x04list\x18\x01 \x01(\x0b\x32\x1f.tensorflow.AttrValue.ListValueH\x00\x12(\n\x04\x66unc\x18\n \x01(\x0b\x32\x18.tensorflow.NameAttrListH\x00\x12\x15\n\x0bplaceholder\x18\t \x01(\tH\x00\x1a\xe9\x01\n\tListValue\x12\t\n\x01s\x18\x02 \x03(\x0c\x12\r\n\x01i\x18\x03 \x03(\x03\x42\x02\x10\x01\x12\r\n\x01\x66\x18\x04 \x03(\x02\x42\x02\x10\x01\x12\r\n\x01\x62\x18\x05 \x03(\x08\x42\x02\x10\x01\x12&\n\x04type\x18\x06 \x03(\x0e\x32\x14.tensorflow.DataTypeB\x02\x10\x01\x12+\n\x05shape\x18\x07 \x03(\x0b\x32\x1c.tensorflow.TensorShapeProto\x12\'\n\x06tensor\x18\x08 \x03(\x0b\x32\x17.tensorflow.TensorProto\x12&\n\x04\x66unc\x18\t 
\x03(\x0b\x32\x18.tensorflow.NameAttrListB\x07\n\x05value"\x92\x01\n\x0cNameAttrList\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x30\n\x04\x61ttr\x18\x02 \x03(\x0b\x32".tensorflow.NameAttrList.AttrEntry\x1a\x42\n\tAttrEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12$\n\x05value\x18\x02 \x01(\x0b\x32\x15.tensorflow.AttrValue:\x02\x38\x01\x42\x83\x01\n\x18org.tensorflow.frameworkB\x0f\x41ttrValueProtosP\x01ZQgithub.com/tensorflow/tensorflow/tensorflow/go/core/framework/attr_value_go_proto\xf8\x01\x01\x62\x06proto3' +) + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages( + DESCRIPTOR, "tensorflow.core.framework.attr_value_pb2", globals() +) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b"\n\030org.tensorflow.frameworkB\017AttrValueProtosP\001ZQgithub.com/tensorflow/tensorflow/tensorflow/go/core/framework/attr_value_go_proto\370\001\001" + _ATTRVALUE_LISTVALUE.fields_by_name["i"]._options = None + _ATTRVALUE_LISTVALUE.fields_by_name["i"]._serialized_options = b"\020\001" + _ATTRVALUE_LISTVALUE.fields_by_name["f"]._options = None + _ATTRVALUE_LISTVALUE.fields_by_name["f"]._serialized_options = b"\020\001" + _ATTRVALUE_LISTVALUE.fields_by_name["b"]._options = None + _ATTRVALUE_LISTVALUE.fields_by_name["b"]._serialized_options = b"\020\001" + _ATTRVALUE_LISTVALUE.fields_by_name["type"]._options = None + _ATTRVALUE_LISTVALUE.fields_by_name["type"]._serialized_options = b"\020\001" + _NAMEATTRLIST_ATTRENTRY._options = None + _NAMEATTRLIST_ATTRENTRY._serialized_options = b"8\001" + _ATTRVALUE._serialized_start = 184 + _ATTRVALUE._serialized_end = 734 + _ATTRVALUE_LISTVALUE._serialized_start = 492 + _ATTRVALUE_LISTVALUE._serialized_end = 725 + _NAMEATTRLIST._serialized_start = 737 + _NAMEATTRLIST._serialized_end = 883 + _NAMEATTRLIST_ATTRENTRY._serialized_start = 817 + _NAMEATTRLIST_ATTRENTRY._serialized_end = 883 +# @@protoc_insertion_point(module_scope) diff --git a/modelscan/vendored/full_type_pb2.py b/modelscan/vendored/full_type_pb2.py new file mode 100644 index 00000000..58fe567d --- /dev/null +++ b/modelscan/vendored/full_type_pb2.py @@ -0,0 +1,31 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: tensorflow/core/framework/full_type.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database + +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( + b"\n)tensorflow/core/framework/full_type.proto\x12\ntensorflow\"\x7f\n\x0b\x46ullTypeDef\x12'\n\x07type_id\x18\x01 \x01(\x0e\x32\x16.tensorflow.FullTypeId\x12%\n\x04\x61rgs\x18\x02 \x03(\x0b\x32\x17.tensorflow.FullTypeDef\x12\x0b\n\x01s\x18\x03 \x01(\tH\x00\x12\x0b\n\x01i\x18\x04 \x01(\x03H\x00\x42\x06\n\x04\x61ttr*\xda\x04\n\nFullTypeId\x12\r\n\tTFT_UNSET\x10\x00\x12\x0b\n\x07TFT_VAR\x10\x01\x12\x0b\n\x07TFT_ANY\x10\x02\x12\x0f\n\x0bTFT_PRODUCT\x10\x03\x12\r\n\tTFT_NAMED\x10\x04\x12\x10\n\x0cTFT_FOR_EACH\x10\x14\x12\x10\n\x0cTFT_CALLABLE\x10\x64\x12\x0f\n\nTFT_TENSOR\x10\xe8\x07\x12\x0e\n\tTFT_ARRAY\x10\xe9\x07\x12\x11\n\x0cTFT_OPTIONAL\x10\xea\x07\x12\x10\n\x0bTFT_LITERAL\x10\xeb\x07\x12\x10\n\x0bTFT_ENCODED\x10\xec\x07\x12\x15\n\x10TFT_SHAPE_TENSOR\x10\xed\x07\x12\r\n\x08TFT_BOOL\x10\xc8\x01\x12\x0e\n\tTFT_UINT8\x10\xc9\x01\x12\x0f\n\nTFT_UINT16\x10\xca\x01\x12\x0f\n\nTFT_UINT32\x10\xcb\x01\x12\x0f\n\nTFT_UINT64\x10\xcc\x01\x12\r\n\x08TFT_INT8\x10\xcd\x01\x12\x0e\n\tTFT_INT16\x10\xce\x01\x12\x0e\n\tTFT_INT32\x10\xcf\x01\x12\x0e\n\tTFT_INT64\x10\xd0\x01\x12\r\n\x08TFT_HALF\x10\xd1\x01\x12\x0e\n\tTFT_FLOAT\x10\xd2\x01\x12\x0f\n\nTFT_DOUBLE\x10\xd3\x01\x12\x11\n\x0cTFT_BFLOAT16\x10\xd7\x01\x12\x12\n\rTFT_COMPLEX64\x10\xd4\x01\x12\x13\n\x0eTFT_COMPLEX128\x10\xd5\x01\x12\x0f\n\nTFT_STRING\x10\xd6\x01\x12\x10\n\x0bTFT_DATASET\x10\xf6N\x12\x0f\n\nTFT_RAGGED\x10\xf7N\x12\x11\n\x0cTFT_ITERATOR\x10\xf8N\x12\x13\n\x0eTFT_MUTEX_LOCK\x10\xdaO\x12\x17\n\x12TFT_LEGACY_VARIANT\x10\xdbOB\x81\x01\n\x18org.tensorflow.frameworkB\x0e\x46ullTypeProtosP\x01ZPgithub.com/tensorflow/tensorflow/tensorflow/go/core/framework/full_type_go_proto\xf8\x01\x01\x62\x06proto3" +) + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages( + DESCRIPTOR, "tensorflow.core.framework.full_type_pb2", globals() +) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b"\n\030org.tensorflow.frameworkB\016FullTypeProtosP\001ZPgithub.com/tensorflow/tensorflow/tensorflow/go/core/framework/full_type_go_proto\370\001\001" + _FULLTYPEID._serialized_start = 187 + _FULLTYPEID._serialized_end = 789 + _FULLTYPEDEF._serialized_start = 57 + _FULLTYPEDEF._serialized_end = 184 +# @@protoc_insertion_point(module_scope) diff --git a/modelscan/vendored/function_pb2.py b/modelscan/vendored/function_pb2.py new file mode 100644 index 00000000..6ac03d81 --- /dev/null +++ b/modelscan/vendored/function_pb2.py @@ -0,0 +1,72 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: tensorflow/core/framework/function.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database + +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from modelscan.vendored import ( + attr_value_pb2 as tensorflow_dot_core_dot_framework_dot_attr__value__pb2, +) +from modelscan.vendored import ( + node_def_pb2 as tensorflow_dot_core_dot_framework_dot_node__def__pb2, +) +from modelscan.vendored import ( + op_def_pb2 as tensorflow_dot_core_dot_framework_dot_op__def__pb2, +) + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( + b'\n(tensorflow/core/framework/function.proto\x12\ntensorflow\x1a*tensorflow/core/framework/attr_value.proto\x1a(tensorflow/core/framework/node_def.proto\x1a&tensorflow/core/framework/op_def.proto"\xa8\x01\n\x12\x46unctionDefLibrary\x12)\n\x08\x66unction\x18\x01 \x03(\x0b\x32\x17.tensorflow.FunctionDef\x12)\n\x08gradient\x18\x02 \x03(\x0b\x32\x17.tensorflow.GradientDef\x12<\n\x14registered_gradients\x18\x03 \x03(\x0b\x32\x1e.tensorflow.RegisteredGradient"\xc4\x06\n\x0b\x46unctionDef\x12$\n\tsignature\x18\x01 \x01(\x0b\x32\x11.tensorflow.OpDef\x12/\n\x04\x61ttr\x18\x05 \x03(\x0b\x32!.tensorflow.FunctionDef.AttrEntry\x12\x36\n\x08\x61rg_attr\x18\x07 \x03(\x0b\x32$.tensorflow.FunctionDef.ArgAttrEntry\x12P\n\x16resource_arg_unique_id\x18\x08 \x03(\x0b\x32\x30.tensorflow.FunctionDef.ResourceArgUniqueIdEntry\x12%\n\x08node_def\x18\x03 \x03(\x0b\x32\x13.tensorflow.NodeDef\x12-\n\x03ret\x18\x04 \x03(\x0b\x32 .tensorflow.FunctionDef.RetEntry\x12<\n\x0b\x63ontrol_ret\x18\x06 \x03(\x0b\x32\'.tensorflow.FunctionDef.ControlRetEntry\x1a\x42\n\tAttrEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12$\n\x05value\x18\x02 \x01(\x0b\x32\x15.tensorflow.AttrValue:\x02\x38\x01\x1a\x88\x01\n\x08\x41rgAttrs\x12\x38\n\x04\x61ttr\x18\x01 \x03(\x0b\x32*.tensorflow.FunctionDef.ArgAttrs.AttrEntry\x1a\x42\n\tAttrEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12$\n\x05value\x18\x02 \x01(\x0b\x32\x15.tensorflow.AttrValue:\x02\x38\x01\x1aP\n\x0c\x41rgAttrEntry\x12\x0b\n\x03key\x18\x01 \x01(\r\x12/\n\x05value\x18\x02 \x01(\x0b\x32 .tensorflow.FunctionDef.ArgAttrs:\x02\x38\x01\x1a:\n\x18ResourceArgUniqueIdEntry\x12\x0b\n\x03key\x18\x01 \x01(\r\x12\r\n\x05value\x18\x02 \x01(\r:\x02\x38\x01\x1a*\n\x08RetEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a\x31\n\x0f\x43ontrolRetEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01J\x04\x08\x02\x10\x03";\n\x0bGradientDef\x12\x15\n\rfunction_name\x18\x01 \x01(\t\x12\x15\n\rgradient_func\x18\x02 \x01(\t"G\n\x12RegisteredGradient\x12\x15\n\rgradient_func\x18\x01 \x01(\t\x12\x1a\n\x12registered_op_type\x18\x02 \x01(\tB\x80\x01\n\x18org.tensorflow.frameworkB\x0e\x46unctionProtosP\x01ZOgithub.com/tensorflow/tensorflow/tensorflow/go/core/framework/function_go_proto\xf8\x01\x01\x62\x06proto3' +) + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages( + DESCRIPTOR, "tensorflow.core.framework.function_pb2", globals() +) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b"\n\030org.tensorflow.frameworkB\016FunctionProtosP\001ZOgithub.com/tensorflow/tensorflow/tensorflow/go/core/framework/function_go_proto\370\001\001" + 
_FUNCTIONDEF_ATTRENTRY._options = None + _FUNCTIONDEF_ATTRENTRY._serialized_options = b"8\001" + _FUNCTIONDEF_ARGATTRS_ATTRENTRY._options = None + _FUNCTIONDEF_ARGATTRS_ATTRENTRY._serialized_options = b"8\001" + _FUNCTIONDEF_ARGATTRENTRY._options = None + _FUNCTIONDEF_ARGATTRENTRY._serialized_options = b"8\001" + _FUNCTIONDEF_RESOURCEARGUNIQUEIDENTRY._options = None + _FUNCTIONDEF_RESOURCEARGUNIQUEIDENTRY._serialized_options = b"8\001" + _FUNCTIONDEF_RETENTRY._options = None + _FUNCTIONDEF_RETENTRY._serialized_options = b"8\001" + _FUNCTIONDEF_CONTROLRETENTRY._options = None + _FUNCTIONDEF_CONTROLRETENTRY._serialized_options = b"8\001" + _FUNCTIONDEFLIBRARY._serialized_start = 183 + _FUNCTIONDEFLIBRARY._serialized_end = 351 + _FUNCTIONDEF._serialized_start = 354 + _FUNCTIONDEF._serialized_end = 1190 + _FUNCTIONDEF_ATTRENTRY._serialized_start = 742 + _FUNCTIONDEF_ATTRENTRY._serialized_end = 808 + _FUNCTIONDEF_ARGATTRS._serialized_start = 811 + _FUNCTIONDEF_ARGATTRS._serialized_end = 947 + _FUNCTIONDEF_ARGATTRS_ATTRENTRY._serialized_start = 742 + _FUNCTIONDEF_ARGATTRS_ATTRENTRY._serialized_end = 808 + _FUNCTIONDEF_ARGATTRENTRY._serialized_start = 949 + _FUNCTIONDEF_ARGATTRENTRY._serialized_end = 1029 + _FUNCTIONDEF_RESOURCEARGUNIQUEIDENTRY._serialized_start = 1031 + _FUNCTIONDEF_RESOURCEARGUNIQUEIDENTRY._serialized_end = 1089 + _FUNCTIONDEF_RETENTRY._serialized_start = 1091 + _FUNCTIONDEF_RETENTRY._serialized_end = 1133 + _FUNCTIONDEF_CONTROLRETENTRY._serialized_start = 1135 + _FUNCTIONDEF_CONTROLRETENTRY._serialized_end = 1184 + _GRADIENTDEF._serialized_start = 1192 + _GRADIENTDEF._serialized_end = 1251 + _REGISTEREDGRADIENT._serialized_start = 1253 + _REGISTEREDGRADIENT._serialized_end = 1324 +# @@protoc_insertion_point(module_scope) diff --git a/modelscan/vendored/graph_debug_info_pb2.py b/modelscan/vendored/graph_debug_info_pb2.py new file mode 100644 index 00000000..4b9942eb --- /dev/null +++ b/modelscan/vendored/graph_debug_info_pb2.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: tensorflow/core/framework/graph_debug_info.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database + +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( + b'\n0tensorflow/core/framework/graph_debug_info.proto\x12\ntensorflow"\xa3\x06\n\x0eGraphDebugInfo\x12\r\n\x05\x66iles\x18\x01 \x03(\t\x12@\n\x0c\x66rames_by_id\x18\x04 \x03(\x0b\x32*.tensorflow.GraphDebugInfo.FramesByIdEntry\x12@\n\x0ctraces_by_id\x18\x06 \x03(\x0b\x32*.tensorflow.GraphDebugInfo.TracesByIdEntry\x12\x36\n\x06traces\x18\x02 \x03(\x0b\x32&.tensorflow.GraphDebugInfo.TracesEntry\x12G\n\x10name_to_trace_id\x18\x05 \x03(\x0b\x32-.tensorflow.GraphDebugInfo.NameToTraceIdEntry\x1aX\n\x0b\x46ileLineCol\x12\x12\n\nfile_index\x18\x01 \x01(\x05\x12\x0c\n\x04line\x18\x02 \x01(\x05\x12\x0b\n\x03\x63ol\x18\x03 \x01(\x05\x12\x0c\n\x04\x66unc\x18\x04 \x01(\t\x12\x0c\n\x04\x63ode\x18\x05 \x01(\t\x1a\x62\n\nStackTrace\x12>\n\x0e\x66ile_line_cols\x18\x01 \x03(\x0b\x32&.tensorflow.GraphDebugInfo.FileLineCol\x12\x14\n\x08\x66rame_id\x18\x02 \x03(\x06\x42\x02\x10\x01\x1aY\n\x0f\x46ramesByIdEntry\x12\x0b\n\x03key\x18\x01 \x01(\x06\x12\x35\n\x05value\x18\x02 \x01(\x0b\x32&.tensorflow.GraphDebugInfo.FileLineCol:\x02\x38\x01\x1aX\n\x0fTracesByIdEntry\x12\x0b\n\x03key\x18\x01 \x01(\x06\x12\x34\n\x05value\x18\x02 \x01(\x0b\x32%.tensorflow.GraphDebugInfo.StackTrace:\x02\x38\x01\x1aT\n\x0bTracesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x34\n\x05value\x18\x02 \x01(\x0b\x32%.tensorflow.GraphDebugInfo.StackTrace:\x02\x38\x01\x1a\x34\n\x12NameToTraceIdEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x06:\x02\x38\x01\x42\x8c\x01\n\x18org.tensorflow.frameworkB\x14GraphDebugInfoProtosP\x01ZUgithub.com/tensorflow/tensorflow/tensorflow/go/core/protobuf/for_core_protos_go_proto\xf8\x01\x01' +) + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages( + DESCRIPTOR, "tensorflow.core.framework.graph_debug_info_pb2", globals() +) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b"\n\030org.tensorflow.frameworkB\024GraphDebugInfoProtosP\001ZUgithub.com/tensorflow/tensorflow/tensorflow/go/core/protobuf/for_core_protos_go_proto\370\001\001" + _GRAPHDEBUGINFO_STACKTRACE.fields_by_name["frame_id"]._options = None + _GRAPHDEBUGINFO_STACKTRACE.fields_by_name[ + "frame_id" + ]._serialized_options = b"\020\001" + _GRAPHDEBUGINFO_FRAMESBYIDENTRY._options = None + _GRAPHDEBUGINFO_FRAMESBYIDENTRY._serialized_options = b"8\001" + _GRAPHDEBUGINFO_TRACESBYIDENTRY._options = None + _GRAPHDEBUGINFO_TRACESBYIDENTRY._serialized_options = b"8\001" + _GRAPHDEBUGINFO_TRACESENTRY._options = None + _GRAPHDEBUGINFO_TRACESENTRY._serialized_options = b"8\001" + _GRAPHDEBUGINFO_NAMETOTRACEIDENTRY._options = None + _GRAPHDEBUGINFO_NAMETOTRACEIDENTRY._serialized_options = b"8\001" + _GRAPHDEBUGINFO._serialized_start = 65 + _GRAPHDEBUGINFO._serialized_end = 868 + _GRAPHDEBUGINFO_FILELINECOL._serialized_start = 359 + _GRAPHDEBUGINFO_FILELINECOL._serialized_end = 447 + _GRAPHDEBUGINFO_STACKTRACE._serialized_start = 449 + _GRAPHDEBUGINFO_STACKTRACE._serialized_end = 547 + _GRAPHDEBUGINFO_FRAMESBYIDENTRY._serialized_start = 549 + 
_GRAPHDEBUGINFO_FRAMESBYIDENTRY._serialized_end = 638 + _GRAPHDEBUGINFO_TRACESBYIDENTRY._serialized_start = 640 + _GRAPHDEBUGINFO_TRACESBYIDENTRY._serialized_end = 728 + _GRAPHDEBUGINFO_TRACESENTRY._serialized_start = 730 + _GRAPHDEBUGINFO_TRACESENTRY._serialized_end = 814 + _GRAPHDEBUGINFO_NAMETOTRACEIDENTRY._serialized_start = 816 + _GRAPHDEBUGINFO_NAMETOTRACEIDENTRY._serialized_end = 868 +# @@protoc_insertion_point(module_scope) diff --git a/modelscan/vendored/graph_pb2.py b/modelscan/vendored/graph_pb2.py new file mode 100644 index 00000000..ec62bfe1 --- /dev/null +++ b/modelscan/vendored/graph_pb2.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: tensorflow/core/framework/graph.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database + +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from modelscan.vendored import ( + function_pb2 as tensorflow_dot_core_dot_framework_dot_function__pb2, +) +from modelscan.vendored import ( + graph_debug_info_pb2 as tensorflow_dot_core_dot_framework_dot_graph__debug__info__pb2, +) +from modelscan.vendored import ( + node_def_pb2 as tensorflow_dot_core_dot_framework_dot_node__def__pb2, +) +from modelscan.vendored import ( + versions_pb2 as tensorflow_dot_core_dot_framework_dot_versions__pb2, +) + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( + b'\n%tensorflow/core/framework/graph.proto\x12\ntensorflow\x1a(tensorflow/core/framework/function.proto\x1a\x30tensorflow/core/framework/graph_debug_info.proto\x1a(tensorflow/core/framework/node_def.proto\x1a(tensorflow/core/framework/versions.proto"\xcd\x01\n\x08GraphDef\x12!\n\x04node\x18\x01 \x03(\x0b\x32\x13.tensorflow.NodeDef\x12(\n\x08versions\x18\x04 \x01(\x0b\x32\x16.tensorflow.VersionDef\x12\x13\n\x07version\x18\x03 \x01(\x05\x42\x02\x18\x01\x12/\n\x07library\x18\x02 \x01(\x0b\x32\x1e.tensorflow.FunctionDefLibrary\x12.\n\ndebug_info\x18\x05 \x01(\x0b\x32\x1a.tensorflow.GraphDebugInfoBz\n\x18org.tensorflow.frameworkB\x0bGraphProtosP\x01ZLgithub.com/tensorflow/tensorflow/tensorflow/go/core/framework/graph_go_proto\xf8\x01\x01\x62\x06proto3' +) + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages( + DESCRIPTOR, "tensorflow.core.framework.graph_pb2", globals() +) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b"\n\030org.tensorflow.frameworkB\013GraphProtosP\001ZLgithub.com/tensorflow/tensorflow/tensorflow/go/core/framework/graph_go_proto\370\001\001" + _GRAPHDEF.fields_by_name["version"]._options = None + _GRAPHDEF.fields_by_name["version"]._serialized_options = b"\030\001" + _GRAPHDEF._serialized_start = 230 + _GRAPHDEF._serialized_end = 435 +# @@protoc_insertion_point(module_scope) diff --git a/modelscan/vendored/keras_versions_pb2.py b/modelscan/vendored/keras_versions_pb2.py new file mode 100644 index 00000000..08560246 --- /dev/null +++ b/modelscan/vendored/keras_versions_pb2.py @@ -0,0 +1,28 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: tensorflow/python/keras/protobuf/versions.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database + +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( + b'\n/tensorflow/python/keras/protobuf/versions.proto\x12,third_party.tensorflow.python.keras.protobuf"K\n\nVersionDef\x12\x10\n\x08producer\x18\x01 \x01(\x05\x12\x14\n\x0cmin_consumer\x18\x02 \x01(\x05\x12\x15\n\rbad_consumers\x18\x03 \x03(\x05\x62\x06proto3' +) + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages( + DESCRIPTOR, "tensorflow.python.keras.protobuf.versions_pb2", globals() +) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + _VERSIONDEF._serialized_start = 97 + _VERSIONDEF._serialized_end = 172 +# @@protoc_insertion_point(module_scope) diff --git a/modelscan/vendored/meta_graph_pb2.py b/modelscan/vendored/meta_graph_pb2.py new file mode 100644 index 00000000..52e6155e --- /dev/null +++ b/modelscan/vendored/meta_graph_pb2.py @@ -0,0 +1,108 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: tensorflow/core/protobuf/meta_graph.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database + +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.protobuf import any_pb2 as google_dot_protobuf_dot_any__pb2 +from modelscan.vendored import ( + graph_pb2 as tensorflow_dot_core_dot_framework_dot_graph__pb2, +) +from modelscan.vendored import ( + op_def_pb2 as tensorflow_dot_core_dot_framework_dot_op__def__pb2, +) +from modelscan.vendored import ( + tensor_pb2 as tensorflow_dot_core_dot_framework_dot_tensor__pb2, +) +from modelscan.vendored import ( + tensor_shape_pb2 as tensorflow_dot_core_dot_framework_dot_tensor__shape__pb2, +) +from modelscan.vendored import ( + types_pb2 as tensorflow_dot_core_dot_framework_dot_types__pb2, +) +from modelscan.vendored import ( + saved_object_graph_pb2 as tensorflow_dot_core_dot_protobuf_dot_saved__object__graph__pb2, +) +from modelscan.vendored import ( + saver_pb2 as tensorflow_dot_core_dot_protobuf_dot_saver__pb2, +) +from modelscan.vendored import ( + struct_pb2 as tensorflow_dot_core_dot_protobuf_dot_struct__pb2, +) + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( + b'\n)tensorflow/core/protobuf/meta_graph.proto\x12\ntensorflow\x1a\x19google/protobuf/any.proto\x1a%tensorflow/core/framework/graph.proto\x1a&tensorflow/core/framework/op_def.proto\x1a&tensorflow/core/framework/tensor.proto\x1a,tensorflow/core/framework/tensor_shape.proto\x1a%tensorflow/core/framework/types.proto\x1a\x31tensorflow/core/protobuf/saved_object_graph.proto\x1a$tensorflow/core/protobuf/saver.proto\x1a%tensorflow/core/protobuf/struct.proto"\xa8\x07\n\x0cMetaGraphDef\x12;\n\rmeta_info_def\x18\x01 \x01(\x0b\x32$.tensorflow.MetaGraphDef.MetaInfoDef\x12\'\n\tgraph_def\x18\x02 \x01(\x0b\x32\x14.tensorflow.GraphDef\x12\'\n\tsaver_def\x18\x03 
\x01(\x0b\x32\x14.tensorflow.SaverDef\x12\x43\n\x0e\x63ollection_def\x18\x04 \x03(\x0b\x32+.tensorflow.MetaGraphDef.CollectionDefEntry\x12\x41\n\rsignature_def\x18\x05 \x03(\x0b\x32*.tensorflow.MetaGraphDef.SignatureDefEntry\x12\x30\n\x0e\x61sset_file_def\x18\x06 \x03(\x0b\x32\x18.tensorflow.AssetFileDef\x12\x36\n\x10object_graph_def\x18\x07 \x01(\x0b\x32\x1c.tensorflow.SavedObjectGraph\x1a\xf6\x02\n\x0bMetaInfoDef\x12\x1a\n\x12meta_graph_version\x18\x01 \x01(\t\x12,\n\x10stripped_op_list\x18\x02 \x01(\x0b\x32\x12.tensorflow.OpList\x12&\n\x08\x61ny_info\x18\x03 \x01(\x0b\x32\x14.google.protobuf.Any\x12\x0c\n\x04tags\x18\x04 \x03(\t\x12\x1a\n\x12tensorflow_version\x18\x05 \x01(\t\x12\x1e\n\x16tensorflow_git_version\x18\x06 \x01(\t\x12\x1e\n\x16stripped_default_attrs\x18\x07 \x01(\x08\x12S\n\x10\x66unction_aliases\x18\x08 \x03(\x0b\x32\x39.tensorflow.MetaGraphDef.MetaInfoDef.FunctionAliasesEntry\x1a\x36\n\x14\x46unctionAliasesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1aO\n\x12\x43ollectionDefEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12(\n\x05value\x18\x02 \x01(\x0b\x32\x19.tensorflow.CollectionDef:\x02\x38\x01\x1aM\n\x11SignatureDefEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\'\n\x05value\x18\x02 \x01(\x0b\x32\x18.tensorflow.SignatureDef:\x02\x38\x01"\xdf\x03\n\rCollectionDef\x12\x37\n\tnode_list\x18\x01 \x01(\x0b\x32".tensorflow.CollectionDef.NodeListH\x00\x12\x39\n\nbytes_list\x18\x02 \x01(\x0b\x32#.tensorflow.CollectionDef.BytesListH\x00\x12\x39\n\nint64_list\x18\x03 \x01(\x0b\x32#.tensorflow.CollectionDef.Int64ListH\x00\x12\x39\n\nfloat_list\x18\x04 \x01(\x0b\x32#.tensorflow.CollectionDef.FloatListH\x00\x12\x35\n\x08\x61ny_list\x18\x05 \x01(\x0b\x32!.tensorflow.CollectionDef.AnyListH\x00\x1a\x19\n\x08NodeList\x12\r\n\x05value\x18\x01 \x03(\t\x1a\x1a\n\tBytesList\x12\r\n\x05value\x18\x01 \x03(\x0c\x1a\x1e\n\tInt64List\x12\x11\n\x05value\x18\x01 \x03(\x03\x42\x02\x10\x01\x1a\x1e\n\tFloatList\x12\x11\n\x05value\x18\x01 \x03(\x02\x42\x02\x10\x01\x1a.\n\x07\x41nyList\x12#\n\x05value\x18\x01 \x03(\x0b\x32\x14.google.protobuf.AnyB\x06\n\x04kind"\xd1\x03\n\nTensorInfo\x12\x0e\n\x04name\x18\x01 \x01(\tH\x00\x12\x36\n\ncoo_sparse\x18\x04 \x01(\x0b\x32 .tensorflow.TensorInfo.CooSparseH\x00\x12\x42\n\x10\x63omposite_tensor\x18\x05 \x01(\x0b\x32&.tensorflow.TensorInfo.CompositeTensorH\x00\x12#\n\x05\x64type\x18\x02 \x01(\x0e\x32\x14.tensorflow.DataType\x12\x32\n\x0ctensor_shape\x18\x03 \x01(\x0b\x32\x1c.tensorflow.TensorShapeProto\x1a\x65\n\tCooSparse\x12\x1a\n\x12values_tensor_name\x18\x01 \x01(\t\x12\x1b\n\x13indices_tensor_name\x18\x02 \x01(\t\x12\x1f\n\x17\x64\x65nse_shape_tensor_name\x18\x03 \x01(\t\x1ak\n\x0f\x43ompositeTensor\x12,\n\ttype_spec\x18\x01 \x01(\x0b\x32\x19.tensorflow.TypeSpecProto\x12*\n\ncomponents\x18\x02 \x03(\x0b\x32\x16.tensorflow.TensorInfoB\n\n\x08\x65ncoding"\xa4\x03\n\x0cSignatureDef\x12\x34\n\x06inputs\x18\x01 \x03(\x0b\x32$.tensorflow.SignatureDef.InputsEntry\x12\x36\n\x07outputs\x18\x02 \x03(\x0b\x32%.tensorflow.SignatureDef.OutputsEntry\x12\x13\n\x0bmethod_name\x18\x03 \x01(\t\x12\x38\n\x08\x64\x65\x66\x61ults\x18\x04 \x03(\x0b\x32&.tensorflow.SignatureDef.DefaultsEntry\x1a\x45\n\x0bInputsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12%\n\x05value\x18\x02 \x01(\x0b\x32\x16.tensorflow.TensorInfo:\x02\x38\x01\x1a\x46\n\x0cOutputsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12%\n\x05value\x18\x02 \x01(\x0b\x32\x16.tensorflow.TensorInfo:\x02\x38\x01\x1aH\n\rDefaultsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12&\n\x05value\x18\x02 
\x01(\x0b\x32\x17.tensorflow.TensorProto:\x02\x38\x01"M\n\x0c\x41ssetFileDef\x12+\n\x0btensor_info\x18\x01 \x01(\x0b\x32\x16.tensorflow.TensorInfo\x12\x10\n\x08\x66ilename\x18\x02 \x01(\tB\x87\x01\n\x18org.tensorflow.frameworkB\x0fMetaGraphProtosP\x01ZUgithub.com/tensorflow/tensorflow/tensorflow/go/core/protobuf/for_core_protos_go_proto\xf8\x01\x01\x62\x06proto3' +) + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages( + DESCRIPTOR, "tensorflow.core.protobuf.meta_graph_pb2", globals() +) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b"\n\030org.tensorflow.frameworkB\017MetaGraphProtosP\001ZUgithub.com/tensorflow/tensorflow/tensorflow/go/core/protobuf/for_core_protos_go_proto\370\001\001" + _METAGRAPHDEF_METAINFODEF_FUNCTIONALIASESENTRY._options = None + _METAGRAPHDEF_METAINFODEF_FUNCTIONALIASESENTRY._serialized_options = b"8\001" + _METAGRAPHDEF_COLLECTIONDEFENTRY._options = None + _METAGRAPHDEF_COLLECTIONDEFENTRY._serialized_options = b"8\001" + _METAGRAPHDEF_SIGNATUREDEFENTRY._options = None + _METAGRAPHDEF_SIGNATUREDEFENTRY._serialized_options = b"8\001" + _COLLECTIONDEF_INT64LIST.fields_by_name["value"]._options = None + _COLLECTIONDEF_INT64LIST.fields_by_name["value"]._serialized_options = b"\020\001" + _COLLECTIONDEF_FLOATLIST.fields_by_name["value"]._options = None + _COLLECTIONDEF_FLOATLIST.fields_by_name["value"]._serialized_options = b"\020\001" + _SIGNATUREDEF_INPUTSENTRY._options = None + _SIGNATUREDEF_INPUTSENTRY._serialized_options = b"8\001" + _SIGNATUREDEF_OUTPUTSENTRY._options = None + _SIGNATUREDEF_OUTPUTSENTRY._serialized_options = b"8\001" + _SIGNATUREDEF_DEFAULTSENTRY._options = None + _SIGNATUREDEF_DEFAULTSENTRY._serialized_options = b"8\001" + _METAGRAPHDEF._serialized_start = 417 + _METAGRAPHDEF._serialized_end = 1353 + _METAGRAPHDEF_METAINFODEF._serialized_start = 819 + _METAGRAPHDEF_METAINFODEF._serialized_end = 1193 + _METAGRAPHDEF_METAINFODEF_FUNCTIONALIASESENTRY._serialized_start = 1139 + _METAGRAPHDEF_METAINFODEF_FUNCTIONALIASESENTRY._serialized_end = 1193 + _METAGRAPHDEF_COLLECTIONDEFENTRY._serialized_start = 1195 + _METAGRAPHDEF_COLLECTIONDEFENTRY._serialized_end = 1274 + _METAGRAPHDEF_SIGNATUREDEFENTRY._serialized_start = 1276 + _METAGRAPHDEF_SIGNATUREDEFENTRY._serialized_end = 1353 + _COLLECTIONDEF._serialized_start = 1356 + _COLLECTIONDEF._serialized_end = 1835 + _COLLECTIONDEF_NODELIST._serialized_start = 1662 + _COLLECTIONDEF_NODELIST._serialized_end = 1687 + _COLLECTIONDEF_BYTESLIST._serialized_start = 1689 + _COLLECTIONDEF_BYTESLIST._serialized_end = 1715 + _COLLECTIONDEF_INT64LIST._serialized_start = 1717 + _COLLECTIONDEF_INT64LIST._serialized_end = 1747 + _COLLECTIONDEF_FLOATLIST._serialized_start = 1749 + _COLLECTIONDEF_FLOATLIST._serialized_end = 1779 + _COLLECTIONDEF_ANYLIST._serialized_start = 1781 + _COLLECTIONDEF_ANYLIST._serialized_end = 1827 + _TENSORINFO._serialized_start = 1838 + _TENSORINFO._serialized_end = 2303 + _TENSORINFO_COOSPARSE._serialized_start = 2081 + _TENSORINFO_COOSPARSE._serialized_end = 2182 + _TENSORINFO_COMPOSITETENSOR._serialized_start = 2184 + _TENSORINFO_COMPOSITETENSOR._serialized_end = 2291 + _SIGNATUREDEF._serialized_start = 2306 + _SIGNATUREDEF._serialized_end = 2726 + _SIGNATUREDEF_INPUTSENTRY._serialized_start = 2511 + _SIGNATUREDEF_INPUTSENTRY._serialized_end = 2580 + _SIGNATUREDEF_OUTPUTSENTRY._serialized_start = 2582 + _SIGNATUREDEF_OUTPUTSENTRY._serialized_end = 2652 + 
_SIGNATUREDEF_DEFAULTSENTRY._serialized_start = 2654 + _SIGNATUREDEF_DEFAULTSENTRY._serialized_end = 2726 + _ASSETFILEDEF._serialized_start = 2728 + _ASSETFILEDEF._serialized_end = 2805 +# @@protoc_insertion_point(module_scope) diff --git a/modelscan/vendored/node_def_pb2.py b/modelscan/vendored/node_def_pb2.py new file mode 100644 index 00000000..b4f142c6 --- /dev/null +++ b/modelscan/vendored/node_def_pb2.py @@ -0,0 +1,43 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: tensorflow/core/framework/node_def.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database + +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from modelscan.vendored import ( + attr_value_pb2 as tensorflow_dot_core_dot_framework_dot_attr__value__pb2, +) +from modelscan.vendored import ( + full_type_pb2 as tensorflow_dot_core_dot_framework_dot_full__type__pb2, +) + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( + b'\n(tensorflow/core/framework/node_def.proto\x12\ntensorflow\x1a*tensorflow/core/framework/attr_value.proto\x1a)tensorflow/core/framework/full_type.proto"\x86\x03\n\x07NodeDef\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\n\n\x02op\x18\x02 \x01(\t\x12\r\n\x05input\x18\x03 \x03(\t\x12\x0e\n\x06\x64\x65vice\x18\x04 \x01(\t\x12+\n\x04\x61ttr\x18\x05 \x03(\x0b\x32\x1d.tensorflow.NodeDef.AttrEntry\x12J\n\x17\x65xperimental_debug_info\x18\x06 \x01(\x0b\x32).tensorflow.NodeDef.ExperimentalDebugInfo\x12\x32\n\x11\x65xperimental_type\x18\x07 \x01(\x0b\x32\x17.tensorflow.FullTypeDef\x1a\x42\n\tAttrEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12$\n\x05value\x18\x02 \x01(\x0b\x32\x15.tensorflow.AttrValue:\x02\x38\x01\x1aQ\n\x15\x45xperimentalDebugInfo\x12\x1b\n\x13original_node_names\x18\x01 \x03(\t\x12\x1b\n\x13original_func_names\x18\x02 \x03(\tB{\n\x18org.tensorflow.frameworkB\tNodeProtoP\x01ZOgithub.com/tensorflow/tensorflow/tensorflow/go/core/framework/node_def_go_proto\xf8\x01\x01\x62\x06proto3' +) + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages( + DESCRIPTOR, "tensorflow.core.framework.node_def_pb2", globals() +) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b"\n\030org.tensorflow.frameworkB\tNodeProtoP\001ZOgithub.com/tensorflow/tensorflow/tensorflow/go/core/framework/node_def_go_proto\370\001\001" + _NODEDEF_ATTRENTRY._options = None + _NODEDEF_ATTRENTRY._serialized_options = b"8\001" + _NODEDEF._serialized_start = 144 + _NODEDEF._serialized_end = 534 + _NODEDEF_ATTRENTRY._serialized_start = 385 + _NODEDEF_ATTRENTRY._serialized_end = 451 + _NODEDEF_EXPERIMENTALDEBUGINFO._serialized_start = 453 + _NODEDEF_EXPERIMENTALDEBUGINFO._serialized_end = 534 +# @@protoc_insertion_point(module_scope) diff --git a/modelscan/vendored/op_def_pb2.py b/modelscan/vendored/op_def_pb2.py new file mode 100644 index 00000000..70bad67d --- /dev/null +++ b/modelscan/vendored/op_def_pb2.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: tensorflow/core/framework/op_def.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database + +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from modelscan.vendored import ( + attr_value_pb2 as tensorflow_dot_core_dot_framework_dot_attr__value__pb2, +) +from modelscan.vendored import ( + full_type_pb2 as tensorflow_dot_core_dot_framework_dot_full__type__pb2, +) +from modelscan.vendored import ( + resource_handle_pb2 as tensorflow_dot_core_dot_framework_dot_resource__handle__pb2, +) +from modelscan.vendored import ( + types_pb2 as tensorflow_dot_core_dot_framework_dot_types__pb2, +) + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( + b'\n&tensorflow/core/framework/op_def.proto\x12\ntensorflow\x1a*tensorflow/core/framework/attr_value.proto\x1a)tensorflow/core/framework/full_type.proto\x1a/tensorflow/core/framework/resource_handle.proto\x1a%tensorflow/core/framework/types.proto"\xf3\x06\n\x05OpDef\x12\x0c\n\x04name\x18\x01 \x01(\t\x12+\n\tinput_arg\x18\x02 \x03(\x0b\x32\x18.tensorflow.OpDef.ArgDef\x12,\n\noutput_arg\x18\x03 \x03(\x0b\x32\x18.tensorflow.OpDef.ArgDef\x12\x16\n\x0e\x63ontrol_output\x18\x14 \x03(\t\x12\'\n\x04\x61ttr\x18\x04 \x03(\x0b\x32\x19.tensorflow.OpDef.AttrDef\x12.\n\x0b\x64\x65precation\x18\x08 \x01(\x0b\x32\x19.tensorflow.OpDeprecation\x12\x0f\n\x07summary\x18\x05 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x06 \x01(\t\x12\x16\n\x0eis_commutative\x18\x12 \x01(\x08\x12\x14\n\x0cis_aggregate\x18\x10 \x01(\x08\x12\x13\n\x0bis_stateful\x18\x11 \x01(\x08\x12"\n\x1a\x61llows_uninitialized_input\x18\x13 \x01(\x08\x12$\n\x1cis_distributed_communication\x18\x15 \x01(\x08\x1a\x9c\x02\n\x06\x41rgDef\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12"\n\x04type\x18\x03 \x01(\x0e\x32\x14.tensorflow.DataType\x12\x11\n\ttype_attr\x18\x04 \x01(\t\x12\x13\n\x0bnumber_attr\x18\x05 \x01(\t\x12\x16\n\x0etype_list_attr\x18\x06 \x01(\t\x12\x42\n\x0bhandle_data\x18\x07 \x03(\x0b\x32-.tensorflow.ResourceHandleProto.DtypeAndShape\x12\x0e\n\x06is_ref\x18\x10 \x01(\x08\x12\x37\n\x16\x65xperimental_full_type\x18\x11 \x01(\x0b\x32\x17.tensorflow.FullTypeDef\x1a\xbd\x01\n\x07\x41ttrDef\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0c\n\x04type\x18\x02 \x01(\t\x12,\n\rdefault_value\x18\x03 \x01(\x0b\x32\x15.tensorflow.AttrValue\x12\x13\n\x0b\x64\x65scription\x18\x04 \x01(\t\x12\x13\n\x0bhas_minimum\x18\x05 \x01(\x08\x12\x0f\n\x07minimum\x18\x06 \x01(\x03\x12-\n\x0e\x61llowed_values\x18\x07 \x01(\x0b\x32\x15.tensorflow.AttrValue"5\n\rOpDeprecation\x12\x0f\n\x07version\x18\x01 \x01(\x05\x12\x13\n\x0b\x65xplanation\x18\x02 \x01(\t"\'\n\x06OpList\x12\x1d\n\x02op\x18\x01 \x03(\x0b\x32\x11.tensorflow.OpDefB{\n\x18org.tensorflow.frameworkB\x0bOpDefProtosP\x01ZMgithub.com/tensorflow/tensorflow/tensorflow/go/core/framework/op_def_go_proto\xf8\x01\x01\x62\x06proto3' +) + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages( + DESCRIPTOR, "tensorflow.core.framework.op_def_pb2", globals() +) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b"\n\030org.tensorflow.frameworkB\013OpDefProtosP\001ZMgithub.com/tensorflow/tensorflow/tensorflow/go/core/framework/op_def_go_proto\370\001\001" + 
_OPDEF._serialized_start = 230 + _OPDEF._serialized_end = 1113 + _OPDEF_ARGDEF._serialized_start = 637 + _OPDEF_ARGDEF._serialized_end = 921 + _OPDEF_ATTRDEF._serialized_start = 924 + _OPDEF_ATTRDEF._serialized_end = 1113 + _OPDEPRECATION._serialized_start = 1115 + _OPDEPRECATION._serialized_end = 1168 + _OPLIST._serialized_start = 1170 + _OPLIST._serialized_end = 1209 +# @@protoc_insertion_point(module_scope) diff --git a/modelscan/vendored/resource_handle_pb2.py b/modelscan/vendored/resource_handle_pb2.py new file mode 100644 index 00000000..ee9ae89a --- /dev/null +++ b/modelscan/vendored/resource_handle_pb2.py @@ -0,0 +1,39 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: tensorflow/core/framework/resource_handle.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database + +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from modelscan.vendored import ( + tensor_shape_pb2 as tensorflow_dot_core_dot_framework_dot_tensor__shape__pb2, +) +from modelscan.vendored import ( + types_pb2 as tensorflow_dot_core_dot_framework_dot_types__pb2, +) + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( + b'\n/tensorflow/core/framework/resource_handle.proto\x12\ntensorflow\x1a,tensorflow/core/framework/tensor_shape.proto\x1a%tensorflow/core/framework/types.proto"\xa5\x02\n\x13ResourceHandleProto\x12\x0e\n\x06\x64\x65vice\x18\x01 \x01(\t\x12\x11\n\tcontainer\x18\x02 \x01(\t\x12\x0c\n\x04name\x18\x03 \x01(\t\x12\x11\n\thash_code\x18\x04 \x01(\x04\x12\x17\n\x0fmaybe_type_name\x18\x05 \x01(\t\x12H\n\x11\x64types_and_shapes\x18\x06 \x03(\x0b\x32-.tensorflow.ResourceHandleProto.DtypeAndShape\x1a\x61\n\rDtypeAndShape\x12#\n\x05\x64type\x18\x01 \x01(\x0e\x32\x14.tensorflow.DataType\x12+\n\x05shape\x18\x02 \x01(\x0b\x32\x1c.tensorflow.TensorShapeProtoJ\x04\x08\x07\x10\x08\x42\x87\x01\n\x18org.tensorflow.frameworkB\x0eResourceHandleP\x01ZVgithub.com/tensorflow/tensorflow/tensorflow/go/core/framework/resource_handle_go_proto\xf8\x01\x01\x62\x06proto3' +) + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages( + DESCRIPTOR, "tensorflow.core.framework.resource_handle_pb2", globals() +) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b"\n\030org.tensorflow.frameworkB\016ResourceHandleP\001ZVgithub.com/tensorflow/tensorflow/tensorflow/go/core/framework/resource_handle_go_proto\370\001\001" + _RESOURCEHANDLEPROTO._serialized_start = 149 + _RESOURCEHANDLEPROTO._serialized_end = 442 + _RESOURCEHANDLEPROTO_DTYPEANDSHAPE._serialized_start = 339 + _RESOURCEHANDLEPROTO_DTYPEANDSHAPE._serialized_end = 436 +# @@protoc_insertion_point(module_scope) diff --git a/modelscan/vendored/saved_metadata_pb2.py b/modelscan/vendored/saved_metadata_pb2.py new file mode 100644 index 00000000..11691631 --- /dev/null +++ b/modelscan/vendored/saved_metadata_pb2.py @@ -0,0 +1,37 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# NO CHECKED-IN PROTOBUF GENCODE +# source: tensorflow/python/keras/protobuf/saved_metadata.proto +# Protobuf Python Version: 5.28.3 (vendored for modelscan, compatible with 3.19+) +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder + +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from modelscan.vendored import ( + keras_versions_pb2 as tensorflow_dot_python_dot_keras_dot_protobuf_dot_versions__pb2, +) + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( + b'\n5tensorflow/python/keras/protobuf/saved_metadata.proto\x12,third_party.tensorflow.python.keras.protobuf\x1a/tensorflow/python/keras/protobuf/versions.proto"Y\n\rSavedMetadata\x12H\n\x05nodes\x18\x01 \x03(\x0b\x32\x39.third_party.tensorflow.python.keras.protobuf.SavedObject"\xa8\x01\n\x0bSavedObject\x12\x0f\n\x07node_id\x18\x02 \x01(\x05\x12\x11\n\tnode_path\x18\x03 \x01(\t\x12\x12\n\nidentifier\x18\x04 \x01(\t\x12\x10\n\x08metadata\x18\x05 \x01(\t\x12I\n\x07version\x18\x06 \x01(\x0b\x32\x38.third_party.tensorflow.python.keras.protobuf.VersionDefJ\x04\x08\x01\x10\x02\x62\x06proto3' +) + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages( + DESCRIPTOR, "tensorflow.python.keras.protobuf.saved_metadata_pb2", _globals +) +if not _descriptor._USE_C_DESCRIPTORS: + DESCRIPTOR._loaded_options = None + _globals["_SAVEDMETADATA"]._serialized_start = 152 + _globals["_SAVEDMETADATA"]._serialized_end = 241 + _globals["_SAVEDOBJECT"]._serialized_start = 244 + _globals["_SAVEDOBJECT"]._serialized_end = 412 +# @@protoc_insertion_point(module_scope) diff --git a/modelscan/vendored/saved_model_pb2.py b/modelscan/vendored/saved_model_pb2.py new file mode 100644 index 00000000..b4a38f67 --- /dev/null +++ b/modelscan/vendored/saved_model_pb2.py @@ -0,0 +1,34 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: tensorflow/core/protobuf/saved_model.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database + +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from modelscan.vendored import ( + meta_graph_pb2 as tensorflow_dot_core_dot_protobuf_dot_meta__graph__pb2, +) + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( + b'\n*tensorflow/core/protobuf/saved_model.proto\x12\ntensorflow\x1a)tensorflow/core/protobuf/meta_graph.proto"_\n\nSavedModel\x12"\n\x1asaved_model_schema_version\x18\x01 \x01(\x03\x12-\n\x0bmeta_graphs\x18\x02 \x03(\x0b\x32\x18.tensorflow.MetaGraphDefB\x88\x01\n\x18org.tensorflow.frameworkB\x10SavedModelProtosP\x01ZUgithub.com/tensorflow/tensorflow/tensorflow/go/core/protobuf/for_core_protos_go_proto\xf8\x01\x01\x62\x06proto3' +) + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages( + DESCRIPTOR, "tensorflow.core.protobuf.saved_model_pb2", globals() +) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b"\n\030org.tensorflow.frameworkB\020SavedModelProtosP\001ZUgithub.com/tensorflow/tensorflow/tensorflow/go/core/protobuf/for_core_protos_go_proto\370\001\001" + _SAVEDMODEL._serialized_start = 101 + _SAVEDMODEL._serialized_end = 196 +# @@protoc_insertion_point(module_scope) diff --git a/modelscan/vendored/saved_object_graph_pb2.py b/modelscan/vendored/saved_object_graph_pb2.py new file mode 100644 index 00000000..c4f2a5e6 --- /dev/null +++ b/modelscan/vendored/saved_object_graph_pb2.py @@ -0,0 +1,86 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: tensorflow/core/protobuf/saved_object_graph.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database + +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.protobuf import any_pb2 as google_dot_protobuf_dot_any__pb2 +from modelscan.vendored import ( + tensor_shape_pb2 as tensorflow_dot_core_dot_framework_dot_tensor__shape__pb2, +) +from modelscan.vendored import ( + types_pb2 as tensorflow_dot_core_dot_framework_dot_types__pb2, +) +from modelscan.vendored import ( + variable_pb2 as tensorflow_dot_core_dot_framework_dot_variable__pb2, +) +from modelscan.vendored import ( + versions_pb2 as tensorflow_dot_core_dot_framework_dot_versions__pb2, +) +from modelscan.vendored import ( + struct_pb2 as tensorflow_dot_core_dot_protobuf_dot_struct__pb2, +) +from modelscan.vendored import ( + trackable_object_graph_pb2 as tensorflow_dot_core_dot_protobuf_dot_trackable__object__graph__pb2, +) + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( + b'\n1tensorflow/core/protobuf/saved_object_graph.proto\x12\ntensorflow\x1a\x19google/protobuf/any.proto\x1a,tensorflow/core/framework/tensor_shape.proto\x1a%tensorflow/core/framework/types.proto\x1a(tensorflow/core/framework/variable.proto\x1a(tensorflow/core/framework/versions.proto\x1a%tensorflow/core/protobuf/struct.proto\x1a\x35tensorflow/core/protobuf/trackable_object_graph.proto"\xe8\x01\n\x10SavedObjectGraph\x12&\n\x05nodes\x18\x01 \x03(\x0b\x32\x17.tensorflow.SavedObject\x12O\n\x12\x63oncrete_functions\x18\x02 \x03(\x0b\x32\x33.tensorflow.SavedObjectGraph.ConcreteFunctionsEntry\x1a[\n\x16\x43oncreteFunctionsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x30\n\x05value\x18\x02 \x01(\x0b\x32!.tensorflow.SavedConcreteFunction:\x02\x38\x01"\xd0\x07\n\x0bSavedObject\x12R\n\x08\x63hildren\x18\x01 \x03(\x0b\x32@.tensorflow.TrackableObjectGraph.TrackableObject.ObjectReference\x12V\n\x0c\x64\x65pendencies\x18\x0f \x03(\x0b\x32@.tensorflow.TrackableObjectGraph.TrackableObject.ObjectReference\x12^\n\x0eslot_variables\x18\x03 \x03(\x0b\x32\x46.tensorflow.TrackableObjectGraph.TrackableObject.SlotVariableReference\x12\x32\n\x0buser_object\x18\x04 \x01(\x0b\x32\x1b.tensorflow.SavedUserObjectH\x00\x12\'\n\x05\x61sset\x18\x05 \x01(\x0b\x32\x16.tensorflow.SavedAssetH\x00\x12-\n\x08\x66unction\x18\x06 \x01(\x0b\x32\x19.tensorflow.SavedFunctionH\x00\x12-\n\x08variable\x18\x07 \x01(\x0b\x32\x19.tensorflow.SavedVariableH\x00\x12G\n\x16\x62\x61re_concrete_function\x18\x08 \x01(\x0b\x32%.tensorflow.SavedBareConcreteFunctionH\x00\x12-\n\x08\x63onstant\x18\t \x01(\x0b\x32\x19.tensorflow.SavedConstantH\x00\x12-\n\x08resource\x18\n \x01(\x0b\x32\x19.tensorflow.SavedResourceH\x00\x12\x35\n\x0f\x63\x61ptured_tensor\x18\x0c \x01(\x0b\x32\x1a.tensorflow.CapturedTensorH\x00\x12\x46\n\x10saveable_objects\x18\x0b \x03(\x0b\x32,.tensorflow.SavedObject.SaveableObjectsEntry\x12\x17\n\x0fregistered_name\x18\r \x01(\t\x12\x33\n\x15serialized_user_proto\x18\x0e \x01(\x0b\x32\x14.google.protobuf.Any\x12\x18\n\x10registered_saver\x18\x10 \x01(\t\x1aR\n\x14SaveableObjectsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12)\n\x05value\x18\x02 \x01(\x0b\x32\x1a.tensorflow.SaveableObject:\x02\x38\x01\x42\x06\n\x04kindJ\x04\x08\x02\x10\x03R\nattributes"d\n\x0fSavedUserObject\x12\x12\n\nidentifier\x18\x01 
\x01(\t\x12\'\n\x07version\x18\x02 \x01(\x0b\x32\x16.tensorflow.VersionDef\x12\x14\n\x08metadata\x18\x03 \x01(\tB\x02\x18\x01"*\n\nSavedAsset\x12\x1c\n\x14\x61sset_file_def_index\x18\x01 \x01(\x05"\\\n\rSavedFunction\x12\x1a\n\x12\x63oncrete_functions\x18\x01 \x03(\t\x12/\n\rfunction_spec\x18\x02 \x01(\x0b\x32\x18.tensorflow.FunctionSpec"9\n\x0e\x43\x61pturedTensor\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x19\n\x11\x63oncrete_function\x18\x02 \x01(\t"\xa8\x01\n\x15SavedConcreteFunction\x12\x14\n\x0c\x62ound_inputs\x18\x02 \x03(\x05\x12\x42\n\x1d\x63\x61nonicalized_input_signature\x18\x03 \x01(\x0b\x32\x1b.tensorflow.StructuredValue\x12\x35\n\x10output_signature\x18\x04 \x01(\x0b\x32\x1b.tensorflow.StructuredValue"\xad\x01\n\x19SavedBareConcreteFunction\x12\x1e\n\x16\x63oncrete_function_name\x18\x01 \x01(\t\x12\x19\n\x11\x61rgument_keywords\x18\x02 \x03(\t\x12$\n\x1c\x61llowed_positional_arguments\x18\x03 \x01(\x03\x12/\n\rfunction_spec\x18\x04 \x01(\x0b\x32\x18.tensorflow.FunctionSpec""\n\rSavedConstant\x12\x11\n\toperation\x18\x01 \x01(\t"\xd7\x02\n\rSavedVariable\x12#\n\x05\x64type\x18\x01 \x01(\x0e\x32\x14.tensorflow.DataType\x12+\n\x05shape\x18\x02 \x01(\x0b\x32\x1c.tensorflow.TensorShapeProto\x12\x11\n\ttrainable\x18\x03 \x01(\x08\x12<\n\x0fsynchronization\x18\x04 \x01(\x0e\x32#.tensorflow.VariableSynchronization\x12\x34\n\x0b\x61ggregation\x18\x05 \x01(\x0e\x32\x1f.tensorflow.VariableAggregation\x12\x0c\n\x04name\x18\x06 \x01(\t\x12\x0e\n\x06\x64\x65vice\x18\x07 \x01(\t\x12O\n,experimental_distributed_variable_components\x18\x08 \x03(\x0b\x32\x19.tensorflow.SavedVariable"\xfb\x01\n\x0c\x46unctionSpec\x12\x30\n\x0b\x66ullargspec\x18\x01 \x01(\x0b\x32\x1b.tensorflow.StructuredValue\x12\x11\n\tis_method\x18\x02 \x01(\x08\x12\x34\n\x0finput_signature\x18\x05 \x01(\x0b\x32\x1b.tensorflow.StructuredValue\x12\x38\n\x0bjit_compile\x18\x06 \x01(\x0e\x32#.tensorflow.FunctionSpec.JitCompile"*\n\nJitCompile\x12\x0b\n\x07\x44\x45\x46\x41ULT\x10\x00\x12\x06\n\x02ON\x10\x01\x12\x07\n\x03OFF\x10\x02J\x04\x08\x03\x10\x04J\x04\x08\x04\x10\x05"\x1f\n\rSavedResource\x12\x0e\n\x06\x64\x65vice\x18\x01 \x01(\t"A\n\x0eSaveableObject\x12\x15\n\rsave_function\x18\x02 \x01(\x05\x12\x18\n\x10restore_function\x18\x03 \x01(\x05\x42ZZUgithub.com/tensorflow/tensorflow/tensorflow/go/core/protobuf/for_core_protos_go_proto\xf8\x01\x01\x62\x06proto3' +) + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages( + DESCRIPTOR, "tensorflow.core.protobuf.saved_object_graph_pb2", globals() +) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b"ZUgithub.com/tensorflow/tensorflow/tensorflow/go/core/protobuf/for_core_protos_go_proto\370\001\001" + _SAVEDOBJECTGRAPH_CONCRETEFUNCTIONSENTRY._options = None + _SAVEDOBJECTGRAPH_CONCRETEFUNCTIONSENTRY._serialized_options = b"8\001" + _SAVEDOBJECT_SAVEABLEOBJECTSENTRY._options = None + _SAVEDOBJECT_SAVEABLEOBJECTSENTRY._serialized_options = b"8\001" + _SAVEDUSEROBJECT.fields_by_name["metadata"]._options = None + _SAVEDUSEROBJECT.fields_by_name["metadata"]._serialized_options = b"\030\001" + _SAVEDOBJECTGRAPH._serialized_start = 356 + _SAVEDOBJECTGRAPH._serialized_end = 588 + _SAVEDOBJECTGRAPH_CONCRETEFUNCTIONSENTRY._serialized_start = 497 + _SAVEDOBJECTGRAPH_CONCRETEFUNCTIONSENTRY._serialized_end = 588 + _SAVEDOBJECT._serialized_start = 591 + _SAVEDOBJECT._serialized_end = 1567 + _SAVEDOBJECT_SAVEABLEOBJECTSENTRY._serialized_start = 1459 + 
_SAVEDOBJECT_SAVEABLEOBJECTSENTRY._serialized_end = 1541 + _SAVEDUSEROBJECT._serialized_start = 1569 + _SAVEDUSEROBJECT._serialized_end = 1669 + _SAVEDASSET._serialized_start = 1671 + _SAVEDASSET._serialized_end = 1713 + _SAVEDFUNCTION._serialized_start = 1715 + _SAVEDFUNCTION._serialized_end = 1807 + _CAPTUREDTENSOR._serialized_start = 1809 + _CAPTUREDTENSOR._serialized_end = 1866 + _SAVEDCONCRETEFUNCTION._serialized_start = 1869 + _SAVEDCONCRETEFUNCTION._serialized_end = 2037 + _SAVEDBARECONCRETEFUNCTION._serialized_start = 2040 + _SAVEDBARECONCRETEFUNCTION._serialized_end = 2213 + _SAVEDCONSTANT._serialized_start = 2215 + _SAVEDCONSTANT._serialized_end = 2249 + _SAVEDVARIABLE._serialized_start = 2252 + _SAVEDVARIABLE._serialized_end = 2595 + _FUNCTIONSPEC._serialized_start = 2598 + _FUNCTIONSPEC._serialized_end = 2849 + _FUNCTIONSPEC_JITCOMPILE._serialized_start = 2795 + _FUNCTIONSPEC_JITCOMPILE._serialized_end = 2837 + _SAVEDRESOURCE._serialized_start = 2851 + _SAVEDRESOURCE._serialized_end = 2882 + _SAVEABLEOBJECT._serialized_start = 2884 + _SAVEABLEOBJECT._serialized_end = 2949 +# @@protoc_insertion_point(module_scope) diff --git a/modelscan/vendored/saver_pb2.py b/modelscan/vendored/saver_pb2.py new file mode 100644 index 00000000..4968c8aa --- /dev/null +++ b/modelscan/vendored/saver_pb2.py @@ -0,0 +1,31 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: tensorflow/core/protobuf/saver.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database + +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( + b'\n$tensorflow/core/protobuf/saver.proto\x12\ntensorflow"\x9e\x02\n\x08SaverDef\x12\x1c\n\x14\x66ilename_tensor_name\x18\x01 \x01(\t\x12\x18\n\x10save_tensor_name\x18\x02 \x01(\t\x12\x17\n\x0frestore_op_name\x18\x03 \x01(\t\x12\x13\n\x0bmax_to_keep\x18\x04 \x01(\x05\x12\x0f\n\x07sharded\x18\x05 \x01(\x08\x12%\n\x1dkeep_checkpoint_every_n_hours\x18\x06 \x01(\x02\x12=\n\x07version\x18\x07 \x01(\x0e\x32,.tensorflow.SaverDef.CheckpointFormatVersion"5\n\x17\x43heckpointFormatVersion\x12\n\n\x06LEGACY\x10\x00\x12\x06\n\x02V1\x10\x01\x12\x06\n\x02V2\x10\x02\x42~\n\x13org.tensorflow.utilB\x0bSaverProtosP\x01ZUgithub.com/tensorflow/tensorflow/tensorflow/go/core/protobuf/for_core_protos_go_proto\xf8\x01\x01\x62\x06proto3' +) + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages( + DESCRIPTOR, "tensorflow.core.protobuf.saver_pb2", globals() +) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b"\n\023org.tensorflow.utilB\013SaverProtosP\001ZUgithub.com/tensorflow/tensorflow/tensorflow/go/core/protobuf/for_core_protos_go_proto\370\001\001" + _SAVERDEF._serialized_start = 53 + _SAVERDEF._serialized_end = 339 + _SAVERDEF_CHECKPOINTFORMATVERSION._serialized_start = 286 + _SAVERDEF_CHECKPOINTFORMATVERSION._serialized_end = 339 +# @@protoc_insertion_point(module_scope) diff --git a/modelscan/vendored/struct_pb2.py b/modelscan/vendored/struct_pb2.py new file mode 100644 index 00000000..98a6e0eb --- /dev/null +++ b/modelscan/vendored/struct_pb2.py @@ -0,0 +1,64 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. 
DO NOT EDIT! +# source: tensorflow/core/protobuf/struct.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database + +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from modelscan.vendored import ( + tensor_pb2 as tensorflow_dot_core_dot_framework_dot_tensor__pb2, +) +from modelscan.vendored import ( + tensor_shape_pb2 as tensorflow_dot_core_dot_framework_dot_tensor__shape__pb2, +) +from modelscan.vendored import ( + types_pb2 as tensorflow_dot_core_dot_framework_dot_types__pb2, +) + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( + b'\n%tensorflow/core/protobuf/struct.proto\x12\ntensorflow\x1a&tensorflow/core/framework/tensor.proto\x1a,tensorflow/core/framework/tensor_shape.proto\x1a%tensorflow/core/framework/types.proto"\xf1\x05\n\x0fStructuredValue\x12+\n\nnone_value\x18\x01 \x01(\x0b\x32\x15.tensorflow.NoneValueH\x00\x12\x17\n\rfloat64_value\x18\x0b \x01(\x01H\x00\x12\x15\n\x0bint64_value\x18\x0c \x01(\x12H\x00\x12\x16\n\x0cstring_value\x18\r \x01(\tH\x00\x12\x14\n\nbool_value\x18\x0e \x01(\x08H\x00\x12:\n\x12tensor_shape_value\x18\x1f \x01(\x0b\x32\x1c.tensorflow.TensorShapeProtoH\x00\x12\x32\n\x12tensor_dtype_value\x18 \x01(\x0e\x32\x14.tensorflow.DataTypeH\x00\x12\x38\n\x11tensor_spec_value\x18! \x01(\x0b\x32\x1b.tensorflow.TensorSpecProtoH\x00\x12\x34\n\x0ftype_spec_value\x18" \x01(\x0b\x32\x19.tensorflow.TypeSpecProtoH\x00\x12G\n\x19\x62ounded_tensor_spec_value\x18# \x01(\x0b\x32".tensorflow.BoundedTensorSpecProtoH\x00\x12+\n\nlist_value\x18\x33 \x01(\x0b\x32\x15.tensorflow.ListValueH\x00\x12-\n\x0btuple_value\x18\x34 \x01(\x0b\x32\x16.tensorflow.TupleValueH\x00\x12+\n\ndict_value\x18\x35 \x01(\x0b\x32\x15.tensorflow.DictValueH\x00\x12\x38\n\x11named_tuple_value\x18\x36 \x01(\x0b\x32\x1b.tensorflow.NamedTupleValueH\x00\x12/\n\x0ctensor_value\x18\x37 \x01(\x0b\x32\x17.tensorflow.TensorProtoH\x00\x12.\n\x0bnumpy_value\x18\x38 \x01(\x0b\x32\x17.tensorflow.TensorProtoH\x00\x42\x06\n\x04kind"\x0b\n\tNoneValue"8\n\tListValue\x12+\n\x06values\x18\x01 \x03(\x0b\x32\x1b.tensorflow.StructuredValue"9\n\nTupleValue\x12+\n\x06values\x18\x01 \x03(\x0b\x32\x1b.tensorflow.StructuredValue"\x8a\x01\n\tDictValue\x12\x31\n\x06\x66ields\x18\x01 \x03(\x0b\x32!.tensorflow.DictValue.FieldsEntry\x1aJ\n\x0b\x46ieldsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12*\n\x05value\x18\x02 \x01(\x0b\x32\x1b.tensorflow.StructuredValue:\x02\x38\x01"D\n\tPairValue\x12\x0b\n\x03key\x18\x01 \x01(\t\x12*\n\x05value\x18\x02 \x01(\x0b\x32\x1b.tensorflow.StructuredValue"F\n\x0fNamedTupleValue\x12\x0c\n\x04name\x18\x01 \x01(\t\x12%\n\x06values\x18\x02 \x03(\x0b\x32\x15.tensorflow.PairValue"q\n\x0fTensorSpecProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12+\n\x05shape\x18\x02 \x01(\x0b\x32\x1c.tensorflow.TensorShapeProto\x12#\n\x05\x64type\x18\x03 \x01(\x0e\x32\x14.tensorflow.DataType"\xcc\x01\n\x16\x42oundedTensorSpecProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12+\n\x05shape\x18\x02 \x01(\x0b\x32\x1c.tensorflow.TensorShapeProto\x12#\n\x05\x64type\x18\x03 \x01(\x0e\x32\x14.tensorflow.DataType\x12(\n\x07minimum\x18\x04 \x01(\x0b\x32\x17.tensorflow.TensorProto\x12(\n\x07maximum\x18\x05 \x01(\x0b\x32\x17.tensorflow.TensorProto"\xf8\x03\n\rTypeSpecProto\x12@\n\x0ftype_spec_class\x18\x01 \x01(\x0e\x32\'.tensorflow.TypeSpecProto.TypeSpecClass\x12/\n\ntype_state\x18\x02 
\x01(\x0b\x32\x1b.tensorflow.StructuredValue\x12\x1c\n\x14type_spec_class_name\x18\x03 \x01(\t\x12\x1b\n\x13num_flat_components\x18\x04 \x01(\x05"\xb8\x02\n\rTypeSpecClass\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x16\n\x12SPARSE_TENSOR_SPEC\x10\x01\x12\x17\n\x13INDEXED_SLICES_SPEC\x10\x02\x12\x16\n\x12RAGGED_TENSOR_SPEC\x10\x03\x12\x15\n\x11TENSOR_ARRAY_SPEC\x10\x04\x12\x15\n\x11\x44\x41TA_DATASET_SPEC\x10\x05\x12\x16\n\x12\x44\x41TA_ITERATOR_SPEC\x10\x06\x12\x11\n\rOPTIONAL_SPEC\x10\x07\x12\x14\n\x10PER_REPLICA_SPEC\x10\x08\x12\x11\n\rVARIABLE_SPEC\x10\t\x12\x16\n\x12ROW_PARTITION_SPEC\x10\n\x12\x18\n\x14REGISTERED_TYPE_SPEC\x10\x0c\x12\x17\n\x13\x45XTENSION_TYPE_SPEC\x10\r"\x04\x08\x0b\x10\x0b\x42WZUgithub.com/tensorflow/tensorflow/tensorflow/go/core/protobuf/for_core_protos_go_protob\x06proto3' +) + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages( + DESCRIPTOR, "tensorflow.core.protobuf.struct_pb2", globals() +) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b"ZUgithub.com/tensorflow/tensorflow/tensorflow/go/core/protobuf/for_core_protos_go_proto" + _DICTVALUE_FIELDSENTRY._options = None + _DICTVALUE_FIELDSENTRY._serialized_options = b"8\001" + _STRUCTUREDVALUE._serialized_start = 179 + _STRUCTUREDVALUE._serialized_end = 932 + _NONEVALUE._serialized_start = 934 + _NONEVALUE._serialized_end = 945 + _LISTVALUE._serialized_start = 947 + _LISTVALUE._serialized_end = 1003 + _TUPLEVALUE._serialized_start = 1005 + _TUPLEVALUE._serialized_end = 1062 + _DICTVALUE._serialized_start = 1065 + _DICTVALUE._serialized_end = 1203 + _DICTVALUE_FIELDSENTRY._serialized_start = 1129 + _DICTVALUE_FIELDSENTRY._serialized_end = 1203 + _PAIRVALUE._serialized_start = 1205 + _PAIRVALUE._serialized_end = 1273 + _NAMEDTUPLEVALUE._serialized_start = 1275 + _NAMEDTUPLEVALUE._serialized_end = 1345 + _TENSORSPECPROTO._serialized_start = 1347 + _TENSORSPECPROTO._serialized_end = 1460 + _BOUNDEDTENSORSPECPROTO._serialized_start = 1463 + _BOUNDEDTENSORSPECPROTO._serialized_end = 1667 + _TYPESPECPROTO._serialized_start = 1670 + _TYPESPECPROTO._serialized_end = 2174 + _TYPESPECPROTO_TYPESPECCLASS._serialized_start = 1862 + _TYPESPECPROTO_TYPESPECCLASS._serialized_end = 2174 +# @@protoc_insertion_point(module_scope) diff --git a/modelscan/vendored/tensor_pb2.py b/modelscan/vendored/tensor_pb2.py new file mode 100644 index 00000000..04d636ba --- /dev/null +++ b/modelscan/vendored/tensor_pb2.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: tensorflow/core/framework/tensor.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database + +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from modelscan.vendored import ( + resource_handle_pb2 as tensorflow_dot_core_dot_framework_dot_resource__handle__pb2, +) +from modelscan.vendored import ( + tensor_shape_pb2 as tensorflow_dot_core_dot_framework_dot_tensor__shape__pb2, +) +from modelscan.vendored import ( + types_pb2 as tensorflow_dot_core_dot_framework_dot_types__pb2, +) + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( + b'\n&tensorflow/core/framework/tensor.proto\x12\ntensorflow\x1a/tensorflow/core/framework/resource_handle.proto\x1a,tensorflow/core/framework/tensor_shape.proto\x1a%tensorflow/core/framework/types.proto"\xa0\x04\n\x0bTensorProto\x12#\n\x05\x64type\x18\x01 \x01(\x0e\x32\x14.tensorflow.DataType\x12\x32\n\x0ctensor_shape\x18\x02 \x01(\x0b\x32\x1c.tensorflow.TensorShapeProto\x12\x16\n\x0eversion_number\x18\x03 \x01(\x05\x12\x16\n\x0etensor_content\x18\x04 \x01(\x0c\x12\x14\n\x08half_val\x18\r \x03(\x05\x42\x02\x10\x01\x12\x15\n\tfloat_val\x18\x05 \x03(\x02\x42\x02\x10\x01\x12\x16\n\ndouble_val\x18\x06 \x03(\x01\x42\x02\x10\x01\x12\x13\n\x07int_val\x18\x07 \x03(\x05\x42\x02\x10\x01\x12\x12\n\nstring_val\x18\x08 \x03(\x0c\x12\x18\n\x0cscomplex_val\x18\t \x03(\x02\x42\x02\x10\x01\x12\x15\n\tint64_val\x18\n \x03(\x03\x42\x02\x10\x01\x12\x14\n\x08\x62ool_val\x18\x0b \x03(\x08\x42\x02\x10\x01\x12\x18\n\x0c\x64\x63omplex_val\x18\x0c \x03(\x01\x42\x02\x10\x01\x12<\n\x13resource_handle_val\x18\x0e \x03(\x0b\x32\x1f.tensorflow.ResourceHandleProto\x12\x37\n\x0bvariant_val\x18\x0f \x03(\x0b\x32".tensorflow.VariantTensorDataProto\x12\x16\n\nuint32_val\x18\x10 \x03(\rB\x02\x10\x01\x12\x16\n\nuint64_val\x18\x11 \x03(\x04\x42\x02\x10\x01\x12\x12\n\nfloat8_val\x18\x12 \x01(\x0c"g\n\x16VariantTensorDataProto\x12\x11\n\ttype_name\x18\x01 \x01(\t\x12\x10\n\x08metadata\x18\x02 \x01(\x0c\x12(\n\x07tensors\x18\x03 \x03(\x0b\x32\x17.tensorflow.TensorProtoB|\n\x18org.tensorflow.frameworkB\x0cTensorProtosP\x01ZMgithub.com/tensorflow/tensorflow/tensorflow/go/core/framework/tensor_go_proto\xf8\x01\x01\x62\x06proto3' +) + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages( + DESCRIPTOR, "tensorflow.core.framework.tensor_pb2", globals() +) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b"\n\030org.tensorflow.frameworkB\014TensorProtosP\001ZMgithub.com/tensorflow/tensorflow/tensorflow/go/core/framework/tensor_go_proto\370\001\001" + _TENSORPROTO.fields_by_name["half_val"]._options = None + _TENSORPROTO.fields_by_name["half_val"]._serialized_options = b"\020\001" + _TENSORPROTO.fields_by_name["float_val"]._options = None + _TENSORPROTO.fields_by_name["float_val"]._serialized_options = b"\020\001" + _TENSORPROTO.fields_by_name["double_val"]._options = None + _TENSORPROTO.fields_by_name["double_val"]._serialized_options = b"\020\001" + _TENSORPROTO.fields_by_name["int_val"]._options = None + _TENSORPROTO.fields_by_name["int_val"]._serialized_options = b"\020\001" + _TENSORPROTO.fields_by_name["scomplex_val"]._options = None + _TENSORPROTO.fields_by_name["scomplex_val"]._serialized_options = b"\020\001" + 
_TENSORPROTO.fields_by_name["int64_val"]._options = None + _TENSORPROTO.fields_by_name["int64_val"]._serialized_options = b"\020\001" + _TENSORPROTO.fields_by_name["bool_val"]._options = None + _TENSORPROTO.fields_by_name["bool_val"]._serialized_options = b"\020\001" + _TENSORPROTO.fields_by_name["dcomplex_val"]._options = None + _TENSORPROTO.fields_by_name["dcomplex_val"]._serialized_options = b"\020\001" + _TENSORPROTO.fields_by_name["uint32_val"]._options = None + _TENSORPROTO.fields_by_name["uint32_val"]._serialized_options = b"\020\001" + _TENSORPROTO.fields_by_name["uint64_val"]._options = None + _TENSORPROTO.fields_by_name["uint64_val"]._serialized_options = b"\020\001" + _TENSORPROTO._serialized_start = 189 + _TENSORPROTO._serialized_end = 733 + _VARIANTTENSORDATAPROTO._serialized_start = 735 + _VARIANTTENSORDATAPROTO._serialized_end = 838 +# @@protoc_insertion_point(module_scope) diff --git a/modelscan/vendored/tensor_shape_pb2.py b/modelscan/vendored/tensor_shape_pb2.py new file mode 100644 index 00000000..cf74af00 --- /dev/null +++ b/modelscan/vendored/tensor_shape_pb2.py @@ -0,0 +1,31 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: tensorflow/core/framework/tensor_shape.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database + +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( + b'\n,tensorflow/core/framework/tensor_shape.proto\x12\ntensorflow"z\n\x10TensorShapeProto\x12-\n\x03\x64im\x18\x02 \x03(\x0b\x32 .tensorflow.TensorShapeProto.Dim\x12\x14\n\x0cunknown_rank\x18\x03 \x01(\x08\x1a!\n\x03\x44im\x12\x0c\n\x04size\x18\x01 \x01(\x03\x12\x0c\n\x04name\x18\x02 \x01(\tB\x87\x01\n\x18org.tensorflow.frameworkB\x11TensorShapeProtosP\x01ZSgithub.com/tensorflow/tensorflow/tensorflow/go/core/framework/tensor_shape_go_proto\xf8\x01\x01\x62\x06proto3' +) + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages( + DESCRIPTOR, "tensorflow.core.framework.tensor_shape_pb2", globals() +) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b"\n\030org.tensorflow.frameworkB\021TensorShapeProtosP\001ZSgithub.com/tensorflow/tensorflow/tensorflow/go/core/framework/tensor_shape_go_proto\370\001\001" + _TENSORSHAPEPROTO._serialized_start = 60 + _TENSORSHAPEPROTO._serialized_end = 182 + _TENSORSHAPEPROTO_DIM._serialized_start = 149 + _TENSORSHAPEPROTO_DIM._serialized_end = 182 +# @@protoc_insertion_point(module_scope) diff --git a/modelscan/vendored/trackable_object_graph_pb2.py b/modelscan/vendored/trackable_object_graph_pb2.py new file mode 100644 index 00000000..4aff9e6a --- /dev/null +++ b/modelscan/vendored/trackable_object_graph_pb2.py @@ -0,0 +1,42 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: tensorflow/core/protobuf/trackable_object_graph.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database + +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.protobuf import wrappers_pb2 as google_dot_protobuf_dot_wrappers__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( + b'\n5tensorflow/core/protobuf/trackable_object_graph.proto\x12\ntensorflow\x1a\x1egoogle/protobuf/wrappers.proto"\xf3\x05\n\x14TrackableObjectGraph\x12?\n\x05nodes\x18\x01 \x03(\x0b\x32\x30.tensorflow.TrackableObjectGraph.TrackableObject\x1a\x99\x05\n\x0fTrackableObject\x12R\n\x08\x63hildren\x18\x01 \x03(\x0b\x32@.tensorflow.TrackableObjectGraph.TrackableObject.ObjectReference\x12U\n\nattributes\x18\x02 \x03(\x0b\x32\x41.tensorflow.TrackableObjectGraph.TrackableObject.SerializedTensor\x12^\n\x0eslot_variables\x18\x03 \x03(\x0b\x32\x46.tensorflow.TrackableObjectGraph.TrackableObject.SlotVariableReference\x12\x35\n\x10registered_saver\x18\x04 \x01(\x0b\x32\x1b.tensorflow.RegisteredSaver\x12\x39\n\x15has_checkpoint_values\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x1a\x36\n\x0fObjectReference\x12\x0f\n\x07node_id\x18\x01 \x01(\x05\x12\x12\n\nlocal_name\x18\x02 \x01(\t\x1a\x63\n\x10SerializedTensor\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x11\n\tfull_name\x18\x02 \x01(\t\x12\x16\n\x0e\x63heckpoint_key\x18\x03 \x01(\tJ\x04\x08\x04\x10\x05R\x10optional_restore\x1al\n\x15SlotVariableReference\x12!\n\x19original_variable_node_id\x18\x01 \x01(\x05\x12\x11\n\tslot_name\x18\x02 \x01(\t\x12\x1d\n\x15slot_variable_node_id\x18\x03 \x01(\x05"4\n\x0fRegisteredSaver\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0bobject_name\x18\x02 \x01(\tBZZUgithub.com/tensorflow/tensorflow/tensorflow/go/core/protobuf/for_core_protos_go_proto\xf8\x01\x01\x62\x06proto3' +) + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages( + DESCRIPTOR, "tensorflow.core.protobuf.trackable_object_graph_pb2", globals() +) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b"ZUgithub.com/tensorflow/tensorflow/tensorflow/go/core/protobuf/for_core_protos_go_proto\370\001\001" + _TRACKABLEOBJECTGRAPH._serialized_start = 102 + _TRACKABLEOBJECTGRAPH._serialized_end = 857 + _TRACKABLEOBJECTGRAPH_TRACKABLEOBJECT._serialized_start = 192 + _TRACKABLEOBJECTGRAPH_TRACKABLEOBJECT._serialized_end = 857 + _TRACKABLEOBJECTGRAPH_TRACKABLEOBJECT_OBJECTREFERENCE._serialized_start = 592 + _TRACKABLEOBJECTGRAPH_TRACKABLEOBJECT_OBJECTREFERENCE._serialized_end = 646 + _TRACKABLEOBJECTGRAPH_TRACKABLEOBJECT_SERIALIZEDTENSOR._serialized_start = 648 + _TRACKABLEOBJECTGRAPH_TRACKABLEOBJECT_SERIALIZEDTENSOR._serialized_end = 747 + _TRACKABLEOBJECTGRAPH_TRACKABLEOBJECT_SLOTVARIABLEREFERENCE._serialized_start = 749 + _TRACKABLEOBJECTGRAPH_TRACKABLEOBJECT_SLOTVARIABLEREFERENCE._serialized_end = 857 + _REGISTEREDSAVER._serialized_start = 859 + _REGISTEREDSAVER._serialized_end = 911 +# @@protoc_insertion_point(module_scope) diff --git a/modelscan/vendored/types_pb2.py b/modelscan/vendored/types_pb2.py new file mode 100644 index 00000000..c3abcbbc --- /dev/null +++ b/modelscan/vendored/types_pb2.py @@ -0,0 +1,31 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. 
DO NOT EDIT! +# source: tensorflow/core/framework/types.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database + +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( + b'\n%tensorflow/core/framework/types.proto\x12\ntensorflow"9\n\x0fSerializedDType\x12&\n\x08\x64\x61tatype\x18\x01 \x01(\x0e\x32\x14.tensorflow.DataType*\xc6\x07\n\x08\x44\x61taType\x12\x0e\n\nDT_INVALID\x10\x00\x12\x0c\n\x08\x44T_FLOAT\x10\x01\x12\r\n\tDT_DOUBLE\x10\x02\x12\x0c\n\x08\x44T_INT32\x10\x03\x12\x0c\n\x08\x44T_UINT8\x10\x04\x12\x0c\n\x08\x44T_INT16\x10\x05\x12\x0b\n\x07\x44T_INT8\x10\x06\x12\r\n\tDT_STRING\x10\x07\x12\x10\n\x0c\x44T_COMPLEX64\x10\x08\x12\x0c\n\x08\x44T_INT64\x10\t\x12\x0b\n\x07\x44T_BOOL\x10\n\x12\x0c\n\x08\x44T_QINT8\x10\x0b\x12\r\n\tDT_QUINT8\x10\x0c\x12\r\n\tDT_QINT32\x10\r\x12\x0f\n\x0b\x44T_BFLOAT16\x10\x0e\x12\r\n\tDT_QINT16\x10\x0f\x12\x0e\n\nDT_QUINT16\x10\x10\x12\r\n\tDT_UINT16\x10\x11\x12\x11\n\rDT_COMPLEX128\x10\x12\x12\x0b\n\x07\x44T_HALF\x10\x13\x12\x0f\n\x0b\x44T_RESOURCE\x10\x14\x12\x0e\n\nDT_VARIANT\x10\x15\x12\r\n\tDT_UINT32\x10\x16\x12\r\n\tDT_UINT64\x10\x17\x12\x12\n\x0e\x44T_FLOAT8_E5M2\x10\x18\x12\x14\n\x10\x44T_FLOAT8_E4M3FN\x10\x19\x12\x0b\n\x07\x44T_INT4\x10\x1d\x12\x0c\n\x08\x44T_UINT4\x10\x1e\x12\x10\n\x0c\x44T_FLOAT_REF\x10\x65\x12\x11\n\rDT_DOUBLE_REF\x10\x66\x12\x10\n\x0c\x44T_INT32_REF\x10g\x12\x10\n\x0c\x44T_UINT8_REF\x10h\x12\x10\n\x0c\x44T_INT16_REF\x10i\x12\x0f\n\x0b\x44T_INT8_REF\x10j\x12\x11\n\rDT_STRING_REF\x10k\x12\x14\n\x10\x44T_COMPLEX64_REF\x10l\x12\x10\n\x0c\x44T_INT64_REF\x10m\x12\x0f\n\x0b\x44T_BOOL_REF\x10n\x12\x10\n\x0c\x44T_QINT8_REF\x10o\x12\x11\n\rDT_QUINT8_REF\x10p\x12\x11\n\rDT_QINT32_REF\x10q\x12\x13\n\x0f\x44T_BFLOAT16_REF\x10r\x12\x11\n\rDT_QINT16_REF\x10s\x12\x12\n\x0e\x44T_QUINT16_REF\x10t\x12\x11\n\rDT_UINT16_REF\x10u\x12\x15\n\x11\x44T_COMPLEX128_REF\x10v\x12\x0f\n\x0b\x44T_HALF_REF\x10w\x12\x13\n\x0f\x44T_RESOURCE_REF\x10x\x12\x12\n\x0e\x44T_VARIANT_REF\x10y\x12\x11\n\rDT_UINT32_REF\x10z\x12\x11\n\rDT_UINT64_REF\x10{\x12\x16\n\x12\x44T_FLOAT8_E5M2_REF\x10|\x12\x18\n\x14\x44T_FLOAT8_E4M3FN_REF\x10}\x12\x10\n\x0b\x44T_INT4_REF\x10\x81\x01\x12\x11\n\x0c\x44T_UINT4_REF\x10\x82\x01\x42z\n\x18org.tensorflow.frameworkB\x0bTypesProtosP\x01ZLgithub.com/tensorflow/tensorflow/tensorflow/go/core/framework/types_go_proto\xf8\x01\x01\x62\x06proto3' +) + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages( + DESCRIPTOR, "tensorflow.core.framework.types_pb2", globals() +) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b"\n\030org.tensorflow.frameworkB\013TypesProtosP\001ZLgithub.com/tensorflow/tensorflow/tensorflow/go/core/framework/types_go_proto\370\001\001" + _DATATYPE._serialized_start = 113 + _DATATYPE._serialized_end = 1079 + _SERIALIZEDDTYPE._serialized_start = 53 + _SERIALIZEDDTYPE._serialized_end = 110 +# @@protoc_insertion_point(module_scope) diff --git a/modelscan/vendored/variable_pb2.py b/modelscan/vendored/variable_pb2.py new file mode 100644 index 00000000..baeb9514 --- /dev/null +++ b/modelscan/vendored/variable_pb2.py @@ -0,0 +1,35 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. 
DO NOT EDIT! +# source: tensorflow/core/framework/variable.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database + +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( + b'\n(tensorflow/core/framework/variable.proto\x12\ntensorflow"\xc8\x02\n\x0bVariableDef\x12\x15\n\rvariable_name\x18\x01 \x01(\t\x12\x1a\n\x12initial_value_name\x18\x06 \x01(\t\x12\x18\n\x10initializer_name\x18\x02 \x01(\t\x12\x15\n\rsnapshot_name\x18\x03 \x01(\t\x12\x39\n\x13save_slice_info_def\x18\x04 \x01(\x0b\x32\x1c.tensorflow.SaveSliceInfoDef\x12\x13\n\x0bis_resource\x18\x05 \x01(\x08\x12\x11\n\ttrainable\x18\x07 \x01(\x08\x12<\n\x0fsynchronization\x18\x08 \x01(\x0e\x32#.tensorflow.VariableSynchronization\x12\x34\n\x0b\x61ggregation\x18\t \x01(\x0e\x32\x1f.tensorflow.VariableAggregation"`\n\x10SaveSliceInfoDef\x12\x11\n\tfull_name\x18\x01 \x01(\t\x12\x12\n\nfull_shape\x18\x02 \x03(\x03\x12\x12\n\nvar_offset\x18\x03 \x03(\x03\x12\x11\n\tvar_shape\x18\x04 \x03(\x03*\xac\x01\n\x17VariableSynchronization\x12!\n\x1dVARIABLE_SYNCHRONIZATION_AUTO\x10\x00\x12!\n\x1dVARIABLE_SYNCHRONIZATION_NONE\x10\x01\x12%\n!VARIABLE_SYNCHRONIZATION_ON_WRITE\x10\x02\x12$\n VARIABLE_SYNCHRONIZATION_ON_READ\x10\x03*\x9e\x01\n\x13VariableAggregation\x12\x1d\n\x19VARIABLE_AGGREGATION_NONE\x10\x00\x12\x1c\n\x18VARIABLE_AGGREGATION_SUM\x10\x01\x12\x1d\n\x19VARIABLE_AGGREGATION_MEAN\x10\x02\x12+\n\'VARIABLE_AGGREGATION_ONLY_FIRST_REPLICA\x10\x03\x42\x80\x01\n\x18org.tensorflow.frameworkB\x0eVariableProtosP\x01ZOgithub.com/tensorflow/tensorflow/tensorflow/go/core/framework/variable_go_proto\xf8\x01\x01\x62\x06proto3' +) + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages( + DESCRIPTOR, "tensorflow.core.framework.variable_pb2", globals() +) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b"\n\030org.tensorflow.frameworkB\016VariableProtosP\001ZOgithub.com/tensorflow/tensorflow/tensorflow/go/core/framework/variable_go_proto\370\001\001" + _VARIABLESYNCHRONIZATION._serialized_start = 486 + _VARIABLESYNCHRONIZATION._serialized_end = 658 + _VARIABLEAGGREGATION._serialized_start = 661 + _VARIABLEAGGREGATION._serialized_end = 819 + _VARIABLEDEF._serialized_start = 57 + _VARIABLEDEF._serialized_end = 385 + _SAVESLICEINFODEF._serialized_start = 387 + _SAVESLICEINFODEF._serialized_end = 483 +# @@protoc_insertion_point(module_scope) diff --git a/modelscan/vendored/versions_pb2.py b/modelscan/vendored/versions_pb2.py new file mode 100644 index 00000000..8772964a --- /dev/null +++ b/modelscan/vendored/versions_pb2.py @@ -0,0 +1,29 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: tensorflow/core/framework/versions.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database + +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( + b'\n(tensorflow/core/framework/versions.proto\x12\ntensorflow"K\n\nVersionDef\x12\x10\n\x08producer\x18\x01 \x01(\x05\x12\x14\n\x0cmin_consumer\x18\x02 \x01(\x05\x12\x15\n\rbad_consumers\x18\x03 \x03(\x05\x42\x80\x01\n\x18org.tensorflow.frameworkB\x0eVersionsProtosP\x01ZOgithub.com/tensorflow/tensorflow/tensorflow/go/core/framework/versions_go_proto\xf8\x01\x01\x62\x06proto3' +) + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages( + DESCRIPTOR, "tensorflow.core.framework.versions_pb2", globals() +) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b"\n\030org.tensorflow.frameworkB\016VersionsProtosP\001ZOgithub.com/tensorflow/tensorflow/tensorflow/go/core/framework/versions_go_proto\370\001\001" + _VERSIONDEF._serialized_start = 56 + _VERSIONDEF._serialized_end = 131 +# @@protoc_insertion_point(module_scope) diff --git a/poetry.lock b/poetry.lock index 1f4d4746..6358146b 100644 --- a/poetry.lock +++ b/poetry.lock @@ -6,12 +6,11 @@ version = "2.1.0" description = "Abseil Python Common Libraries, see https://github.com/abseil/abseil-py." optional = false python-versions = ">=3.7" -groups = ["main", "test"] +groups = ["test"] files = [ {file = "absl-py-2.1.0.tar.gz", hash = "sha256:7820790efbb316739cde8b4e19357243fc3608a152024288513dd968d7d959ff"}, {file = "absl_py-2.1.0-py3-none-any.whl", hash = "sha256:526a04eadab8b4ee719ce68f204172ead1027549089702d99b9059f129ff1308"}, ] -markers = {main = "extra == \"tensorflow\""} [[package]] name = "aiohappyeyeballs" @@ -155,12 +154,11 @@ version = "1.6.3" description = "An AST unparser for Python" optional = false python-versions = "*" -groups = ["main", "test"] +groups = ["test"] files = [ {file = "astunparse-1.6.3-py2.py3-none-any.whl", hash = "sha256:c2652417f2c8b5bb325c885ae329bdf3f86424075c4fd1a128674bc6fba4b8e8"}, {file = "astunparse-1.6.3.tar.gz", hash = "sha256:5ad93a8456f0d084c3456d059fd9a92cce667963232cbf763eac3bc5b7940872"}, ] -markers = {main = "extra == \"tensorflow\""} [package.dependencies] six = ">=1.6.1,<2.0" @@ -278,7 +276,7 @@ version = "2024.7.4" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" -groups = ["main", "test"] +groups = ["test"] files = [ {file = "certifi-2024.7.4-py3-none-any.whl", hash = "sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90"}, {file = "certifi-2024.7.4.tar.gz", hash = "sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b"}, @@ -302,7 +300,7 @@ version = "3.3.2" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
optional = false python-versions = ">=3.7.0" -groups = ["main", "test"] +groups = ["test"] files = [ {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, @@ -506,12 +504,11 @@ version = "24.3.25" description = "The FlatBuffers serialization format for Python" optional = false python-versions = "*" -groups = ["main", "test"] +groups = ["test"] files = [ {file = "flatbuffers-24.3.25-py2.py3-none-any.whl", hash = "sha256:8dbdec58f935f3765e4f7f3cf635ac3a77f83568138d6a2311f524ec96364812"}, {file = "flatbuffers-24.3.25.tar.gz", hash = "sha256:de2ec5b203f21441716617f38443e0a8ebf3d25bf0d9c0bb0ce68fa00ad546a4"}, ] -markers = {main = "extra == \"tensorflow\""} [[package]] name = "frozenlist" @@ -646,12 +643,11 @@ version = "0.6.0" description = "Python AST that abstracts the underlying Python version" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" -groups = ["main", "test"] +groups = ["test"] files = [ {file = "gast-0.6.0-py3-none-any.whl", hash = "sha256:52b182313f7330389f72b069ba00f174cfe2a06411099547288839c6cbafbd54"}, {file = "gast-0.6.0.tar.gz", hash = "sha256:88fc5300d32c7ac6ca7b515310862f71e6fdf2c029bbec7c66c0f5dd47b6b1fb"}, ] -markers = {main = "extra == \"tensorflow\""} [[package]] name = "google-pasta" @@ -659,13 +655,12 @@ version = "0.2.0" description = "pasta is an AST-based Python refactoring library" optional = false python-versions = "*" -groups = ["main", "test"] +groups = ["test"] files = [ {file = "google-pasta-0.2.0.tar.gz", hash = "sha256:c9f2c8dfc8f96d0d5808299920721be30c9eec37f2389f28904f454565c8a16e"}, {file = "google_pasta-0.2.0-py2-none-any.whl", hash = "sha256:4612951da876b1a10fe3960d7226f0c7682cf901e16ac06e473b267a5afa8954"}, {file = "google_pasta-0.2.0-py3-none-any.whl", hash = "sha256:b32482794a366b5366a32c92a9a9201b107821889935a02b3e51f6b432ea84ed"}, ] -markers = {main = "extra == \"tensorflow\""} [package.dependencies] six = "*" @@ -676,7 +671,7 @@ version = "1.65.1" description = "HTTP/2-based RPC framework" optional = false python-versions = ">=3.8" -groups = ["main", "test"] +groups = ["test"] files = [ {file = "grpcio-1.65.1-cp310-cp310-linux_armv7l.whl", hash = "sha256:3dc5f928815b8972fb83b78d8db5039559f39e004ec93ebac316403fe031a062"}, {file = "grpcio-1.65.1-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:8333ca46053c35484c9f2f7e8d8ec98c1383a8675a449163cea31a2076d93de8"}, @@ -725,7 +720,6 @@ files = [ {file = "grpcio-1.65.1-cp39-cp39-win_amd64.whl", hash = "sha256:1bceeec568372cbebf554eae1b436b06c2ff24cfaf04afade729fb9035408c6c"}, {file = "grpcio-1.65.1.tar.gz", hash = "sha256:3c492301988cd720cd145d84e17318d45af342e29ef93141228f9cd73222368b"}, ] -markers = {main = "extra == \"tensorflow\""} [package.extras] protobuf = ["grpcio-tools (>=1.65.1)"] @@ -765,7 +759,7 @@ files = [ {file = "h5py-3.13.0-cp39-cp39-win_amd64.whl", hash = "sha256:9c82ece71ed1c2b807b6628e3933bc6eae57ea21dac207dca3470e3ceaaf437c"}, {file = "h5py-3.13.0.tar.gz", hash = "sha256:1870e46518720023da85d0895a1960ff2ce398c5671eac3b1a41ec696b7105c3"}, ] -markers = {main = "extra == \"h5py\" or extra == \"tensorflow\""} +markers = {main = "extra == \"h5py\""} [package.dependencies] numpy = ">=1.19.3" @@ -791,7 +785,7 @@ version = "3.7" description = "Internationalized Domain Names in Applications (IDNA)" optional = false python-versions = 
">=3.5" -groups = ["main", "test"] +groups = ["test"] files = [ {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"}, {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, @@ -833,12 +827,11 @@ version = "3.11.3" description = "Multi-backend Keras" optional = false python-versions = ">=3.10" -groups = ["main", "test"] +groups = ["test"] files = [ {file = "keras-3.11.3-py3-none-any.whl", hash = "sha256:f484f050e05ee400455b05ec8c36ed35edc34de94256b6073f56cfe68f65491f"}, {file = "keras-3.11.3.tar.gz", hash = "sha256:efda616835c31b7d916d72303ef9adec1257320bc9fd4b2b0138840fc65fb5b7"}, ] -markers = {main = "extra == \"tensorflow\""} [package.dependencies] absl-py = "*" @@ -856,7 +849,7 @@ version = "18.1.1" description = "Clang Python Bindings, mirrored from the official LLVM repo: https://github.com/llvm/llvm-project/tree/main/clang/bindings/python, to make the installation process easier." optional = false python-versions = "*" -groups = ["main", "test"] +groups = ["test"] files = [ {file = "libclang-18.1.1-1-py2.py3-none-macosx_11_0_arm64.whl", hash = "sha256:0b2e143f0fac830156feb56f9231ff8338c20aecfe72b4ffe96f19e5a1dbb69a"}, {file = "libclang-18.1.1-py2.py3-none-macosx_10_9_x86_64.whl", hash = "sha256:6f14c3f194704e5d09769108f03185fce7acaf1d1ae4bbb2f30a72c2400cb7c5"}, @@ -869,7 +862,6 @@ files = [ {file = "libclang-18.1.1-py2.py3-none-win_arm64.whl", hash = "sha256:3f0e1f49f04d3cd198985fea0511576b0aee16f9ff0e0f0cad7f9c57ec3c20e8"}, {file = "libclang-18.1.1.tar.gz", hash = "sha256:a1214966d08d73d971287fc3ead8dfaf82eb07fb197680d8b3859dbbbbf78250"}, ] -markers = {main = "extra == \"tensorflow\""} [[package]] name = "markdown" @@ -877,12 +869,11 @@ version = "3.6" description = "Python implementation of John Gruber's Markdown." optional = false python-versions = ">=3.8" -groups = ["main", "test"] +groups = ["test"] files = [ {file = "Markdown-3.6-py3-none-any.whl", hash = "sha256:48f276f4d8cfb8ce6527c8f79e2ee29708508bf4d40aa410fbc3b4ee832c850f"}, {file = "Markdown-3.6.tar.gz", hash = "sha256:ed4f41f6daecbeeb96e576ce414c41d2d876daa9a16cb35fa8ed8c2ddfad0224"}, ] -markers = {main = "extra == \"tensorflow\""} [package.extras] docs = ["mdx-gh-links (>=0.2)", "mkdocs (>=1.5)", "mkdocs-gen-files", "mkdocs-literate-nav", "mkdocs-nature (>=0.6)", "mkdocs-section-index", "mkdocstrings[python]"] @@ -919,7 +910,7 @@ version = "2.1.5" description = "Safely add untrusted strings to HTML/XML markup." optional = false python-versions = ">=3.7" -groups = ["main", "test"] +groups = ["test"] files = [ {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, @@ -982,7 +973,6 @@ files = [ {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, ] -markers = {main = "extra == \"tensorflow\""} [[package]] name = "mdurl" @@ -1002,7 +992,7 @@ version = "0.5.3" description = "ml_dtypes is a stand-alone implementation of several NumPy dtype extensions used in machine learning." 
optional = false python-versions = ">=3.9" -groups = ["main", "test"] +groups = ["test"] files = [ {file = "ml_dtypes-0.5.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0a1d68a7cb53e3f640b2b6a34d12c0542da3dd935e560fdf463c0c77f339fc20"}, {file = "ml_dtypes-0.5.3-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0cd5a6c711b5350f3cbc2ac28def81cd1c580075ccb7955e61e9d8f4bfd40d24"}, @@ -1040,7 +1030,6 @@ files = [ {file = "ml_dtypes-0.5.3-cp39-cp39-win_amd64.whl", hash = "sha256:a4f39b9bf6555fab9bfb536cf5fdd1c1c727e8d22312078702e9ff005354b37f"}, {file = "ml_dtypes-0.5.3.tar.gz", hash = "sha256:95ce33057ba4d05df50b1f3cfefab22e351868a843b3b15a46c65836283670c9"}, ] -markers = {main = "extra == \"tensorflow\""} [package.dependencies] numpy = [ @@ -1242,12 +1231,11 @@ version = "0.0.8" description = "A simple utility to separate the implementation of your Python package and its public API surface." optional = false python-versions = "*" -groups = ["main", "test"] +groups = ["test"] files = [ {file = "namex-0.0.8-py3-none-any.whl", hash = "sha256:7ddb6c2bb0e753a311b7590f84f6da659dd0c05e65cb89d519d54c0a250c0487"}, {file = "namex-0.0.8.tar.gz", hash = "sha256:32a50f6c565c0bb10aa76298c959507abdc0e850efe085dc38f3440fcb3aa90b"}, ] -markers = {main = "extra == \"tensorflow\""} [[package]] name = "networkx" @@ -1563,12 +1551,11 @@ version = "3.3.0" description = "Optimizing numpys einsum function" optional = false python-versions = ">=3.5" -groups = ["main", "test"] +groups = ["test"] files = [ {file = "opt_einsum-3.3.0-py3-none-any.whl", hash = "sha256:2455e59e3947d3c275477df7f5205b30635e266fe6dc300e3d9f9646bfcea147"}, {file = "opt_einsum-3.3.0.tar.gz", hash = "sha256:59f6475f77bbc37dcf7cd748519c0ec60722e91e63ca114e68821c0c54a46549"}, ] -markers = {main = "extra == \"tensorflow\""} [package.dependencies] numpy = ">=1.7" @@ -1583,7 +1570,7 @@ version = "0.12.1" description = "Optimized PyTree Utilities." 
optional = false python-versions = ">=3.7" -groups = ["main", "test"] +groups = ["test"] files = [ {file = "optree-0.12.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:349aafac463642979f7fe7ca3aa9e2fa8a5a0f81ef7af6946a075b797673e600"}, {file = "optree-0.12.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e8046cbbcd5f7494ba7c6811e44a6d2867216f2bdb7cef980a9a62e31d39270c"}, @@ -1654,7 +1641,6 @@ files = [ {file = "optree-0.12.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:1b2fe5c04c218698a53ed2d4b7372f1989df8cf0a61d616e6f384770d8a5fb1c"}, {file = "optree-0.12.1.tar.gz", hash = "sha256:76a2240e7482355966a73c6c701e3d1f148420a77849c78d175d3b08bf06ff36"}, ] -markers = {main = "extra == \"tensorflow\""} [package.dependencies] typing-extensions = ">=4.5.0" @@ -1674,12 +1660,11 @@ version = "24.1" description = "Core utilities for Python packages" optional = false python-versions = ">=3.8" -groups = ["main", "dev", "prod", "test"] +groups = ["dev", "prod", "test"] files = [ {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"}, {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"}, ] -markers = {main = "extra == \"tensorflow\""} [[package]] name = "pathspec" @@ -1711,7 +1696,7 @@ version = "11.3.0" description = "Python Imaging Library (Fork)" optional = false python-versions = ">=3.9" -groups = ["main", "test"] +groups = ["test"] files = [ {file = "pillow-11.3.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:1b9c17fd4ace828b3003dfd1e30bff24863e0eb59b535e8f80194d9cc7ecf860"}, {file = "pillow-11.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:65dc69160114cdd0ca0f35cb434633c75e8e7fad4cf855177a05bf38678f73ad"}, @@ -1820,7 +1805,6 @@ files = [ {file = "pillow-11.3.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:c84d689db21a1c397d001aa08241044aa2069e7587b398c8cc63020390b1c1b8"}, {file = "pillow-11.3.0.tar.gz", hash = "sha256:3828ee7586cd0b2091b6209e5ad53e20d0649bbe87164a459d0676e035e8f523"}, ] -markers = {main = "extra == \"tensorflow\""} [package.extras] docs = ["furo", "olefile", "sphinx (>=8.2)", "sphinx-autobuild", "sphinx-copybutton", "sphinx-inline-tabs", "sphinxext-opengraph"] @@ -2009,7 +1993,6 @@ files = [ {file = "protobuf-6.32.0-py3-none-any.whl", hash = "sha256:ba377e5b67b908c8f3072a57b63e2c6a4cbd18aea4ed98d2584350dbf46f2783"}, {file = "protobuf-6.32.0.tar.gz", hash = "sha256:a81439049127067fc49ec1d36e25c6ee1d1a2b7be930675f919258d03c04e7d2"}, ] -markers = {main = "extra == \"tensorflow\""} [[package]] name = "pygments" @@ -2116,7 +2099,7 @@ version = "2.32.4" description = "Python HTTP for Humans." 
optional = false python-versions = ">=3.8" -groups = ["main", "test"] +groups = ["test"] files = [ {file = "requests-2.32.4-py3-none-any.whl", hash = "sha256:27babd3cda2a6d50b30443204ee89830707d396671944c998b5975b031ac2b2c"}, {file = "requests-2.32.4.tar.gz", hash = "sha256:27d0316682c8a29834d3264820024b62a36942083d52caf2f14c0591336d3422"}, @@ -2179,12 +2162,11 @@ version = "1.16.0" description = "Python 2 and 3 compatibility utilities" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" -groups = ["main", "test"] +groups = ["test"] files = [ {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, ] -markers = {main = "extra == \"tensorflow\""} [[package]] name = "stevedore" @@ -2225,11 +2207,10 @@ version = "2.20.0" description = "TensorBoard lets you watch Tensors Flow" optional = false python-versions = ">=3.9" -groups = ["main", "test"] +groups = ["test"] files = [ {file = "tensorboard-2.20.0-py3-none-any.whl", hash = "sha256:9dc9f978cb84c0723acf9a345d96c184f0293d18f166bb8d59ee098e6cfaaba6"}, ] -markers = {main = "extra == \"tensorflow\""} [package.dependencies] absl-py = ">=0.4" @@ -2249,13 +2230,12 @@ version = "0.7.2" description = "Fast data loading for TensorBoard" optional = false python-versions = ">=3.7" -groups = ["main", "test"] +groups = ["test"] files = [ {file = "tensorboard_data_server-0.7.2-py3-none-any.whl", hash = "sha256:7e0610d205889588983836ec05dc098e80f97b7e7bbff7e994ebb78f578d0ddb"}, {file = "tensorboard_data_server-0.7.2-py3-none-macosx_10_9_x86_64.whl", hash = "sha256:9fe5d24221b29625dbc7328b0436ca7fc1c23de4acf4d272f1180856e32f9f60"}, {file = "tensorboard_data_server-0.7.2-py3-none-manylinux_2_31_x86_64.whl", hash = "sha256:ef687163c24185ae9754ed5650eb5bc4d84ff257aabdc33f0cc6f74d8ba54530"}, ] -markers = {main = "extra == \"tensorflow\""} [[package]] name = "tensorflow" @@ -2263,7 +2243,7 @@ version = "2.20.0" description = "TensorFlow is an open source machine learning framework for everyone." 
optional = false python-versions = ">=3.9" -groups = ["main", "test"] +groups = ["test"] files = [ {file = "tensorflow-2.20.0-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:e5f169f8f5130ab255bbe854c5f0ae152e93d3d1ac44f42cb1866003b81a5357"}, {file = "tensorflow-2.20.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:02a0293d94f5c8b7125b66abf622cc4854a33ae9d618a0d41309f95e091bbaea"}, @@ -2286,7 +2266,6 @@ files = [ {file = "tensorflow-2.20.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0deb5c583dfc53b54fd158a194ce0087b406bb6518af400ca3809735e4548ec3"}, {file = "tensorflow-2.20.0-cp39-cp39-win_amd64.whl", hash = "sha256:dd71a7e7c3270239f4185915e8f2c5d39608c5e18973d6e1d101b153993841eb"}, ] -markers = {main = "extra == \"tensorflow\""} [package.dependencies] absl-py = ">=1.0.0" @@ -2321,12 +2300,11 @@ version = "2.4.0" description = "ANSI color formatting for output in terminal" optional = false python-versions = ">=3.8" -groups = ["main", "test"] +groups = ["test"] files = [ {file = "termcolor-2.4.0-py3-none-any.whl", hash = "sha256:9297c0df9c99445c2412e832e882a7884038a25617c60cea2ad69488d4040d63"}, {file = "termcolor-2.4.0.tar.gz", hash = "sha256:aab9e56047c8ac41ed798fa36d892a37aca6b3e9159f3e0c24bc64a9b3ac7b7a"}, ] -markers = {main = "extra == \"tensorflow\""} [package.extras] tests = ["pytest", "pytest-cov"] @@ -2484,7 +2462,7 @@ files = [ {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, ] -markers = {main = "extra == \"tensorflow\" or python_version == \"3.10\"", dev = "python_version == \"3.10\""} +markers = {main = "python_version == \"3.10\"", dev = "python_version == \"3.10\""} [[package]] name = "urllib3" @@ -2492,7 +2470,7 @@ version = "2.5.0" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=3.9" -groups = ["main", "test"] +groups = ["test"] files = [ {file = "urllib3-2.5.0-py3-none-any.whl", hash = "sha256:e6b01673c0fa6a13e374b50871808eb3bf7046c4b125b216f6bf1cc604cff0dc"}, {file = "urllib3-2.5.0.tar.gz", hash = "sha256:3fc47733c7e419d4bc3f6b3dc2b4f890bb743906a30d56ba4a5bfa4bbff92760"}, @@ -2531,12 +2509,11 @@ version = "3.0.6" description = "The comprehensive WSGI web application library." 
optional = false python-versions = ">=3.8" -groups = ["main", "test"] +groups = ["test"] files = [ {file = "werkzeug-3.0.6-py3-none-any.whl", hash = "sha256:1bc0c2310d2fbb07b1dd1105eba2f7af72f322e1e455f2f93c993bee8c8a5f17"}, {file = "werkzeug-3.0.6.tar.gz", hash = "sha256:a8dd59d4de28ca70471a34cba79bed5f7ef2e036a76b3ab0835474246eb41f8d"}, ] -markers = {main = "extra == \"tensorflow\""} [package.dependencies] MarkupSafe = ">=2.1.1" @@ -2550,12 +2527,11 @@ version = "0.43.0" description = "A built-package format for Python" optional = false python-versions = ">=3.8" -groups = ["main", "test"] +groups = ["test"] files = [ {file = "wheel-0.43.0-py3-none-any.whl", hash = "sha256:55c570405f142630c6b9f72fe09d9b67cf1477fcf543ae5b8dcb1f5b7377da81"}, {file = "wheel-0.43.0.tar.gz", hash = "sha256:465ef92c69fa5c5da2d1cf8ac40559a8c940886afcef87dcf14b9470862f1d85"}, ] -markers = {main = "extra == \"tensorflow\""} [package.extras] test = ["pytest (>=6.0.0)", "setuptools (>=65)"] @@ -2566,7 +2542,7 @@ version = "1.16.0" description = "Module for decorators, wrappers and monkey patching." optional = false python-versions = ">=3.6" -groups = ["main", "test"] +groups = ["test"] files = [ {file = "wrapt-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ffa565331890b90056c01db69c0fe634a776f8019c143a5ae265f9c6bc4bd6d4"}, {file = "wrapt-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e4fdb9275308292e880dcbeb12546df7f3e0f96c6b41197e0cf37d2826359020"}, @@ -2639,7 +2615,6 @@ files = [ {file = "wrapt-1.16.0-py3-none-any.whl", hash = "sha256:6906c4100a8fcbf2fa735f6059214bb13b97f75b1a61777fcf6432121ef12ef1"}, {file = "wrapt-1.16.0.tar.gz", hash = "sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d"}, ] -markers = {main = "extra == \"tensorflow\""} [[package]] name = "yarl" @@ -2740,9 +2715,8 @@ propcache = ">=0.2.0" [extras] h5py = ["h5py"] -tensorflow = ["tensorflow"] [metadata] lock-version = "2.1" python-versions = ">=3.10,<3.13" -content-hash = "daef07379045ec1b682eed2078810ccdd37a9e0de17bd2dd8d24fec7761150ab" +content-hash = "c0020f4daeee449b7191a53e0ef7fb80fa1c477f2304073abe1c659068306382" diff --git a/pyproject.toml b/pyproject.toml index 7f02c34f..77904e2f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -19,12 +19,9 @@ rich = ">=13.4.2,<15.0.0" tomlkit = ">=0.12.3,<0.14.0" h5py = { version = "^3.9.0", optional = true } setuptools = "80.9.0" - -# TODO: Add py3.12 once TF release supports -tensorflow = { version = "^2.17", optional = true } +protobuf = ">=3.19.0" [tool.poetry.extras] -tensorflow = ["tensorflow"] h5py = ["h5py"] [tool.poetry.group.test.dependencies] @@ -42,7 +39,7 @@ tf-keras = "^2.20.1" [tool.poetry.group.dev.dependencies] dunamai = "^1.18.0" pre-commit = ">=3.3.3,<5.0.0" -black = ">=23.7,<26.0" +black = "22.8.0" [tool.poetry.group.prod.dependencies] dunamai = "^1.18.0" @@ -58,4 +55,8 @@ enable = true exclude_dirs = ["tests", "notebooks"] [tool.mypy] -exclude = ["notebooks"] +exclude = "vendored|notebooks" + +[[tool.mypy.overrides]] +module = "modelscan.vendored.*" +ignore_errors = true diff --git a/scripts/update_tensorflow_operators.py b/scripts/update_tensorflow_operators.py new file mode 100755 index 00000000..717cffd5 --- /dev/null +++ b/scripts/update_tensorflow_operators.py @@ -0,0 +1,64 @@ +#!/usr/bin/env python3 +""" +Script to update the TensorFlow safe operators list. + +This script imports TensorFlow, extracts all raw operators, and updates +the JSON file used by modelscan to identify safe TensorFlow operations. 
+ +Usage: + python scripts/update_tensorflow_operators.py + +Requirements: + - TensorFlow must be installed + - Run from the project root directory +""" + +import json +import os +import sys +from pathlib import Path +from typing import List, Dict, Any + + +def update_operators_json(json_path: Path) -> None: + """ + Update the JSON file with new version and operators. + + Args: + json_path: Path to the JSON file to update + """ + try: + import tensorflow as tf + except ImportError: + print("ERROR: TensorFlow is not installed.") + print("Please install TensorFlow: pip install tensorflow") + sys.exit(1) + + version: str = tf.__version__ + operators: List[str] = [] + for op in tf.raw_ops.__dict__.keys(): + operators.append(op) + data: Dict[str, Any] = {} + data[ + "description" + ] = "List of known TensorFlow raw operators from tensorflow.raw_ops.__dict__.keys()" + data["version"] = version + data["operators"] = operators + + # Write updated file with nice formatting + with open(json_path, "w") as f: + json.dump(data, f, indent=2) + + +def main() -> None: + """Main entry point.""" + # Determine paths + script_dir = Path(__file__).parent + project_root = script_dir.parent + json_path = project_root / "modelscan" / "data" / "tensorflow_operators.json" + + update_operators_json(json_path) + + +if __name__ == "__main__": + main() diff --git a/tests/test_modelscan.py b/tests/test_modelscan.py index a82e4ecc..f3223695 100644 --- a/tests/test_modelscan.py +++ b/tests/test_modelscan.py @@ -1,32 +1,24 @@ import aiohttp import bdb +import dill import http.client import importlib import io import numpy as np import os -from pathlib import Path import pickle -import dill import pytest import requests import shutil import socket import subprocess import sys -import torch -import tensorflow as tf import tf_keras as keras -from typing import Any, List, Set, Dict -from test_utils import ( - generate_dill_unsafe_file, - generate_unsafe_pickle_file, - MaliciousModule, - PyTorchTestModel, -) +import torch import zipfile +from pathlib import Path +from typing import Any, List, Set, Dict -from modelscan.modelscan import ModelScan from modelscan.cli import cli from modelscan.issues import ( Issue, @@ -34,13 +26,19 @@ IssueSeverity, OperatorIssueDetails, ) +from modelscan.model import Model +from modelscan.modelscan import ModelScan +from modelscan.settings import DEFAULT_SETTINGS +from modelscan.skip import SkipCategories from modelscan.tools.picklescanner import ( scan_pickle_bytes, ) - -from modelscan.skip import SkipCategories -from modelscan.settings import DEFAULT_SETTINGS -from modelscan.model import Model +from test_utils import ( + generate_dill_unsafe_file, + generate_unsafe_pickle_file, + MaliciousModule, + PyTorchTestModel, +) settings: Dict[str, Any] = DEFAULT_SETTINGS