diff --git a/MLOps/serving_patterns/README.md b/MLOps/serving_patterns/README.md new file mode 100644 index 0000000..8b9e808 --- /dev/null +++ b/MLOps/serving_patterns/README.md @@ -0,0 +1,15 @@ +# Serving patterns + +.
+├── [chapter 4. 추론 시스템 만들기](./)
+   ├── [web single pattern](./web_single_pattern)
+   ├── [synchronous inference pattern](./synchronous_pattern)
+   ├── [asynchronous inference pattern](./asynchronous_pattern)
+   ├── [batch inference pattern](./batch_pattern)
+   ├── [preprocess and prediction pattern](./prep_pred_pattern)
+   ├── [horizontal microservice pattern](./horizontal_microservice_pattern)
+   ├── [sync and async pattern](./sync_async_pattern)
+   ├── [prediction cache pattern](./prediction_cache_pattern)
+   ├── [data cache pattern](./data_cache_pattern)
+   ├── [prediction template pattern](./template_pattern)
+   └── [Edge AI pattern](./edge_ai_pattern)
diff --git a/MLOps/serving_patterns/asynchronous_pattern/.dockerignore b/MLOps/serving_patterns/asynchronous_pattern/.dockerignore new file mode 100644 index 0000000..feab7e4 --- /dev/null +++ b/MLOps/serving_patterns/asynchronous_pattern/.dockerignore @@ -0,0 +1,12 @@ +dockerfile +Dockerfile +.dockerignore +log +tmp +*.sqlite3 +*.sqlite3-journal +__pycache__ +.pytest_cache +*.dvc +dvc.yaml +dvc.lock diff --git a/MLOps/serving_patterns/asynchronous_pattern/Dockerfile.backend b/MLOps/serving_patterns/asynchronous_pattern/Dockerfile.backend new file mode 100644 index 0000000..179397f --- /dev/null +++ b/MLOps/serving_patterns/asynchronous_pattern/Dockerfile.backend @@ -0,0 +1,19 @@ +FROM python:3.8-slim + +ENV MODEL_DIR imagenet_inception_v3 +ENV PROJECT_DIR asynchronous_pattern +WORKDIR /${PROJECT_DIR} +ADD ./requirements_backend.txt /${PROJECT_DIR}/ +RUN apt-get -y update && \ + apt-get -y install apt-utils gcc && \ + apt-get clean && \ + rm -rf /var/lib/apt/lists/* && \ + pip install --no-cache-dir -r requirements_backend.txt + +COPY ./src/ /${PROJECT_DIR}/src/ +COPY ./${MODEL_DIR}/data/ /${PROJECT_DIR}/data/ + +ENV LOG_LEVEL DEBUG +ENV LOG_FORMAT TEXT + +CMD [ "python", "-m", "src.app.backend.prediction_batch" ] diff --git a/MLOps/serving_patterns/asynchronous_pattern/Dockerfile.proxy b/MLOps/serving_patterns/asynchronous_pattern/Dockerfile.proxy new file mode 100644 index 0000000..e8a1128 --- /dev/null +++ b/MLOps/serving_patterns/asynchronous_pattern/Dockerfile.proxy @@ -0,0 +1,21 @@ +FROM python:3.8-slim + +ENV MODEL_DIR imagenet_inception_v3 +ENV PROJECT_DIR asynchronous_pattern +WORKDIR /${PROJECT_DIR} +ADD ./requirements_proxy.txt /${PROJECT_DIR}/ +RUN apt-get -y update && \ + apt-get -y install apt-utils gcc && \ + apt-get clean && \ + rm -rf /var/lib/apt/lists/* && \ + pip install --no-cache-dir -r requirements_proxy.txt + +COPY ./src/ /${PROJECT_DIR}/src/ +COPY ./${MODEL_DIR}/data/ /${PROJECT_DIR}/data/ + +ENV LOG_LEVEL DEBUG +ENV LOG_FORMAT TEXT + +COPY 
./run.sh /${PROJECT_DIR}/run.sh +RUN chmod +x /${PROJECT_DIR}/run.sh +CMD [ "./run.sh" ] diff --git a/MLOps/serving_patterns/asynchronous_pattern/README.md b/MLOps/serving_patterns/asynchronous_pattern/README.md new file mode 100644 index 0000000..413e0d6 --- /dev/null +++ b/MLOps/serving_patterns/asynchronous_pattern/README.md @@ -0,0 +1,169 @@ +# 비동기 추론 패턴 + +## 목적 + +비동기 추론 API 를 제공합니다. + +## 전제 + +- Python 3.8 이상 +- Docker +- Docker compose + +## 사용법 + +0. 현재 디렉토리 + +```sh +$ pwd +~/ml-system-in-actions/chapter4_serving_patterns/asynchronous_pattern +``` + +1. 비동기 추론을 위한 Docker 이미지 빌드 + +```sh +$ make build_all +# 실행 커맨드 +# docker build \ +# -t shibui/ml-system-in-actions:asynchronous_pattern_asynchronous_proxy_0.0.1 \ +# -f ./Dockerfile.proxy . +# docker build \ +# -t shibui/ml-system-in-actions:asynchronous_pattern_imagenet_inception_v3_0.0.1 \ +# -f ./imagenet_inception_v3/Dockerfile . +# docker build \ +# -t shibui/ml-system-in-actions:asynchronous_pattern_asynchronous_backend_0.0.1 \ +# -f ./Dockerfile.backend . +``` + +2. Docker compose 로 각 서비스 기동 + +```sh +$ make c_up +# 실행 커맨드 +# docker-compose \ +# -f ./docker-compose.yml \ +# up -d +``` + +3. 
기동한 API 에 요청 + +```sh +# 헬스 체크 +$ curl localhost:8000/health +# 출력 +# {"health":"ok"} + +# 메타 데이터 +$ curl localhost:8000/metadata +# 출력 +# { +# "model_spec": { +# "name": "inception_v3", +# "signature_name": "", +# "version": "0" +# }, +# "metadata": { +# "signature_def": { +# "signature_def": { +# "serving_default": { +# "inputs": { +# "image": { +# "dtype": "DT_STRING", +# "tensor_shape": { +# "dim": [ +# { +# "size": "-1", +# "name": "" +# } +# ], +# "unknown_rank": false +# }, +# "name": "serving_default_image:0" +# } +# }, +# "outputs": { +# "output_0": { +# "dtype": "DT_STRING", +# "tensor_shape": { +# "dim": [], +# "unknown_rank": true +# }, +# "name": "StatefulPartitionedCall:0" +# } +# }, +# "method_name": "tensorflow/serving/predict" +# }, +# "__saved_model_init_op": { +# "inputs": {}, +# "outputs": { +# "__saved_model_init_op": { +# "dtype": "DT_INVALID", +# "tensor_shape": { +# "dim": [], +# "unknown_rank": true +# }, +# "name": "NoOp" +# } +# }, +# "method_name": "" +# } +# } +# } +# } +# } + +# 라벨 목록 +$ curl localhost:8000/label +# 출력 +# [ +# "background", +# "tench", +# "goldfish", +# ... +# "bolete", +# "ear", +# "toilet tissue" +# ] + +# 테스트 데이터로 추론 요청 +$ curl localhost:8000/predict/test +# 출력 +# { +# "job_id": "f22689" +# } + + +# 이미지를 요청 +$ (echo \ + -n '{"image_data": "'; \ + base64 imagenet_inception_v3/data/cat.jpg; \ + echo '"}') | \ + curl \ + -X POST \ + -H "Content-Type: application/json" \ + -d @- \ + localhost:8000/predict +# 출력 +# { +# "job_id":"2f49aa" +# } + +# 이미지 요청의 작업 ID 로부터 추론 결과를 요청 +$ curl localhost:8000/job/2f49aa +# 출력 +# { +# "2f49aa": { +# "prediction": "Siamese cat" +# } +# } +``` + +4. 
Docker compose 정지 + +```sh +$ make c_down +# 실행 커맨드 +# docker-compose \ +# -f ./docker-compose.yml \ +# down +``` diff --git a/MLOps/serving_patterns/asynchronous_pattern/__init__.py b/MLOps/serving_patterns/asynchronous_pattern/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/MLOps/serving_patterns/asynchronous_pattern/docker-compose.yml b/MLOps/serving_patterns/asynchronous_pattern/docker-compose.yml new file mode 100644 index 0000000..ef6875c --- /dev/null +++ b/MLOps/serving_patterns/asynchronous_pattern/docker-compose.yml @@ -0,0 +1,48 @@ +version: "3" + +services: + asynchronous_proxy: + container_name: asynchronous_proxy + image: shibui/ml-system-in-actions:asynchronous_pattern_asynchronous_proxy_0.0.1 + restart: always + environment: + - PLATFORM=docker_compose + - QUEUE_NAME=tfs_queue + - API_ADDRESS=imagenet_inception_v3 + ports: + - "8000:8000" + command: ./run.sh + depends_on: + - redis + - imagenet_inception_v3 + - asynchronous_backend + + imagenet_inception_v3: + container_name: imagenet_inception_v3 + image: shibui/ml-system-in-actions:asynchronous_pattern_imagenet_inception_v3_0.0.1 + restart: always + environment: + - PORT=8500 + - REST_API_PORT=8501 + ports: + - "8500:8500" + - "8501:8501" + entrypoint: ["/usr/bin/tf_serving_entrypoint.sh"] + + asynchronous_backend: + container_name: asynchronous_backend + image: shibui/ml-system-in-actions:asynchronous_pattern_asynchronous_backend_0.0.1 + restart: always + environment: + - PLATFORM=docker_compose + - QUEUE_NAME=tfs_queue + - API_ADDRESS=imagenet_inception_v3 + entrypoint: ["python", "-m", "src.app.backend.prediction_batch"] + depends_on: + - redis + + redis: + container_name: asynchronous_redis + image: "redis:latest" + ports: + - "6379:6379" diff --git a/MLOps/serving_patterns/asynchronous_pattern/imagenet_inception_v3/Dockerfile b/MLOps/serving_patterns/asynchronous_pattern/imagenet_inception_v3/Dockerfile new file mode 100644 index 0000000..6a77de2 --- /dev/null +++ 
b/MLOps/serving_patterns/asynchronous_pattern/imagenet_inception_v3/Dockerfile @@ -0,0 +1,31 @@ +FROM tensorflow/tensorflow:2.5.1 as builder + +ARG SERVER_DIR=imagenet_inception_v3 +ENV PROJECT_DIR asynchronous_pattern +WORKDIR /${PROJECT_DIR} +ADD ./${SERVER_DIR}/requirements.txt /${PROJECT_DIR}/ + +COPY ./${SERVER_DIR}/extract_inception_v3.py /${PROJECT_DIR}/extract_inception_v3.py +COPY ./${SERVER_DIR}/data/image_net_labels.json /${PROJECT_DIR}/data/image_net_labels.json + +RUN apt-get -y update && \ + apt-get -y install apt-utils gcc && \ + pip install --no-cache-dir -r requirements.txt && \ + touch __init__.py && \ + python -m extract_inception_v3 + + +FROM tensorflow/serving:2.5.2 + +ARG SERVER_DIR=imagenet_inception_v3 +ENV PROJECT_DIR asynchronous_pattern +ENV MODEL_BASE_PATH /${PROJECT_DIR}/saved_model/inception_v3 +ENV MODEL_NAME inception_v3 + +COPY --from=builder /${PROJECT_DIR}/saved_model/inception_v3 ${MODEL_BASE_PATH} +EXPOSE 8500 +EXPOSE 8501 + +COPY ./${SERVER_DIR}/tf_serving_entrypoint.sh /usr/bin/tf_serving_entrypoint.sh +RUN chmod +x /usr/bin/tf_serving_entrypoint.sh +ENTRYPOINT ["/usr/bin/tf_serving_entrypoint.sh"] \ No newline at end of file diff --git a/MLOps/serving_patterns/asynchronous_pattern/imagenet_inception_v3/__init__.py b/MLOps/serving_patterns/asynchronous_pattern/imagenet_inception_v3/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/MLOps/serving_patterns/asynchronous_pattern/imagenet_inception_v3/data/cat.jpg b/MLOps/serving_patterns/asynchronous_pattern/imagenet_inception_v3/data/cat.jpg new file mode 100644 index 0000000..c4d4861 Binary files /dev/null and b/MLOps/serving_patterns/asynchronous_pattern/imagenet_inception_v3/data/cat.jpg differ diff --git a/MLOps/serving_patterns/asynchronous_pattern/imagenet_inception_v3/data/image_net_labels.json b/MLOps/serving_patterns/asynchronous_pattern/imagenet_inception_v3/data/image_net_labels.json new file mode 100644 index 0000000..8b00a76 --- /dev/null +++ 
b/MLOps/serving_patterns/asynchronous_pattern/imagenet_inception_v3/data/image_net_labels.json @@ -0,0 +1,1003 @@ +[ + "background", + "tench", + "goldfish", + "great white shark", + "tiger shark", + "hammerhead", + "electric ray", + "stingray", + "cock", + "hen", + "ostrich", + "brambling", + "goldfinch", + "house finch", + "junco", + "indigo bunting", + "robin", + "bulbul", + "jay", + "magpie", + "chickadee", + "water ouzel", + "kite", + "bald eagle", + "vulture", + "great grey owl", + "European fire salamander", + "common newt", + "eft", + "spotted salamander", + "axolotl", + "bullfrog", + "tree frog", + "tailed frog", + "loggerhead", + "leatherback turtle", + "mud turtle", + "terrapin", + "box turtle", + "banded gecko", + "common iguana", + "American chameleon", + "whiptail", + "agama", + "frilled lizard", + "alligator lizard", + "Gila monster", + "green lizard", + "African chameleon", + "Komodo dragon", + "African crocodile", + "American alligator", + "triceratops", + "thunder snake", + "ringneck snake", + "hognose snake", + "green snake", + "king snake", + "garter snake", + "water snake", + "vine snake", + "night snake", + "boa constrictor", + "rock python", + "Indian cobra", + "green mamba", + "sea snake", + "horned viper", + "diamondback", + "sidewinder", + "trilobite", + "harvestman", + "scorpion", + "black and gold garden spider", + "barn spider", + "garden spider", + "black widow", + "tarantula", + "wolf spider", + "tick", + "centipede", + "black grouse", + "ptarmigan", + "ruffed grouse", + "prairie chicken", + "peacock", + "quail", + "partridge", + "African grey", + "macaw", + "sulphur-crested cockatoo", + "lorikeet", + "coucal", + "bee eater", + "hornbill", + "hummingbird", + "jacamar", + "toucan", + "drake", + "red-breasted merganser", + "goose", + "black swan", + "tusker", + "echidna", + "platypus", + "wallaby", + "koala", + "wombat", + "jellyfish", + "sea anemone", + "brain coral", + "flatworm", + "nematode", + "conch", + "snail", + "slug", + "sea 
slug", + "chiton", + "chambered nautilus", + "Dungeness crab", + "rock crab", + "fiddler crab", + "king crab", + "American lobster", + "spiny lobster", + "crayfish", + "hermit crab", + "isopod", + "white stork", + "black stork", + "spoonbill", + "flamingo", + "little blue heron", + "American egret", + "bittern", + "crane", + "limpkin", + "European gallinule", + "American coot", + "bustard", + "ruddy turnstone", + "red-backed sandpiper", + "redshank", + "dowitcher", + "oystercatcher", + "pelican", + "king penguin", + "albatross", + "grey whale", + "killer whale", + "dugong", + "sea lion", + "Chihuahua", + "Japanese spaniel", + "Maltese dog", + "Pekinese", + "Shih-Tzu", + "Blenheim spaniel", + "papillon", + "toy terrier", + "Rhodesian ridgeback", + "Afghan hound", + "basset", + "beagle", + "bloodhound", + "bluetick", + "black-and-tan coonhound", + "Walker hound", + "English foxhound", + "redbone", + "borzoi", + "Irish wolfhound", + "Italian greyhound", + "whippet", + "Ibizan hound", + "Norwegian elkhound", + "otterhound", + "Saluki", + "Scottish deerhound", + "Weimaraner", + "Staffordshire bullterrier", + "American Staffordshire terrier", + "Bedlington terrier", + "Border terrier", + "Kerry blue terrier", + "Irish terrier", + "Norfolk terrier", + "Norwich terrier", + "Yorkshire terrier", + "wire-haired fox terrier", + "Lakeland terrier", + "Sealyham terrier", + "Airedale", + "cairn", + "Australian terrier", + "Dandie Dinmont", + "Boston bull", + "miniature schnauzer", + "giant schnauzer", + "standard schnauzer", + "Scotch terrier", + "Tibetan terrier", + "silky terrier", + "soft-coated wheaten terrier", + "West Highland white terrier", + "Lhasa", + "flat-coated retriever", + "curly-coated retriever", + "golden retriever", + "Labrador retriever", + "Chesapeake Bay retriever", + "German short-haired pointer", + "vizsla", + "English setter", + "Irish setter", + "Gordon setter", + "Brittany spaniel", + "clumber", + "English springer", + "Welsh springer spaniel", + 
"cocker spaniel", + "Sussex spaniel", + "Irish water spaniel", + "kuvasz", + "schipperke", + "groenendael", + "malinois", + "briard", + "kelpie", + "komondor", + "Old English sheepdog", + "Shetland sheepdog", + "collie", + "Border collie", + "Bouvier des Flandres", + "Rottweiler", + "German shepherd", + "Doberman", + "miniature pinscher", + "Greater Swiss Mountain dog", + "Bernese mountain dog", + "Appenzeller", + "EntleBucher", + "boxer", + "bull mastiff", + "Tibetan mastiff", + "French bulldog", + "Great Dane", + "Saint Bernard", + "Eskimo dog", + "malamute", + "Siberian husky", + "dalmatian", + "affenpinscher", + "basenji", + "pug", + "Leonberg", + "Newfoundland", + "Great Pyrenees", + "Samoyed", + "Pomeranian", + "chow", + "keeshond", + "Brabancon griffon", + "Pembroke", + "Cardigan", + "toy poodle", + "miniature poodle", + "standard poodle", + "Mexican hairless", + "timber wolf", + "white wolf", + "red wolf", + "coyote", + "dingo", + "dhole", + "African hunting dog", + "hyena", + "red fox", + "kit fox", + "Arctic fox", + "grey fox", + "tabby", + "tiger cat", + "Persian cat", + "Siamese cat", + "Egyptian cat", + "cougar", + "lynx", + "leopard", + "snow leopard", + "jaguar", + "lion", + "tiger", + "cheetah", + "brown bear", + "American black bear", + "ice bear", + "sloth bear", + "mongoose", + "meerkat", + "tiger beetle", + "ladybug", + "ground beetle", + "long-horned beetle", + "leaf beetle", + "dung beetle", + "rhinoceros beetle", + "weevil", + "fly", + "bee", + "ant", + "grasshopper", + "cricket", + "walking stick", + "cockroach", + "mantis", + "cicada", + "leafhopper", + "lacewing", + "dragonfly", + "damselfly", + "admiral", + "ringlet", + "monarch", + "cabbage butterfly", + "sulphur butterfly", + "lycaenid", + "starfish", + "sea urchin", + "sea cucumber", + "wood rabbit", + "hare", + "Angora", + "hamster", + "porcupine", + "fox squirrel", + "marmot", + "beaver", + "guinea pig", + "sorrel", + "zebra", + "hog", + "wild boar", + "warthog", + "hippopotamus", + 
"ox", + "water buffalo", + "bison", + "ram", + "bighorn", + "ibex", + "hartebeest", + "impala", + "gazelle", + "Arabian camel", + "llama", + "weasel", + "mink", + "polecat", + "black-footed ferret", + "otter", + "skunk", + "badger", + "armadillo", + "three-toed sloth", + "orangutan", + "gorilla", + "chimpanzee", + "gibbon", + "siamang", + "guenon", + "patas", + "baboon", + "macaque", + "langur", + "colobus", + "proboscis monkey", + "marmoset", + "capuchin", + "howler monkey", + "titi", + "spider monkey", + "squirrel monkey", + "Madagascar cat", + "indri", + "Indian elephant", + "African elephant", + "lesser panda", + "giant panda", + "barracouta", + "eel", + "coho", + "rock beauty", + "anemone fish", + "sturgeon", + "gar", + "lionfish", + "puffer", + "abacus", + "abaya", + "academic gown", + "accordion", + "acoustic guitar", + "aircraft carrier", + "airliner", + "airship", + "altar", + "ambulance", + "amphibian", + "analog clock", + "apiary", + "apron", + "ashcan", + "assault rifle", + "backpack", + "bakery", + "balance beam", + "balloon", + "ballpoint", + "Band Aid", + "banjo", + "bannister", + "barbell", + "barber chair", + "barbershop", + "barn", + "barometer", + "barrel", + "barrow", + "baseball", + "basketball", + "bassinet", + "bassoon", + "bathing cap", + "bath towel", + "bathtub", + "beach wagon", + "beacon", + "beaker", + "bearskin", + "beer bottle", + "beer glass", + "bell cote", + "bib", + "bicycle-built-for-two", + "bikini", + "binder", + "binoculars", + "birdhouse", + "boathouse", + "bobsled", + "bolo tie", + "bonnet", + "bookcase", + "bookshop", + "bottlecap", + "bow", + "bow tie", + "brass", + "brassiere", + "breakwater", + "breastplate", + "broom", + "bucket", + "buckle", + "bulletproof vest", + "bullet train", + "butcher shop", + "cab", + "caldron", + "candle", + "cannon", + "canoe", + "can opener", + "cardigan", + "car mirror", + "carousel", + "carpenter's kit", + "carton", + "car wheel", + "cash machine", + "cassette", + "cassette player", + 
"castle", + "catamaran", + "CD player", + "cello", + "cellular telephone", + "chain", + "chainlink fence", + "chain mail", + "chain saw", + "chest", + "chiffonier", + "chime", + "china cabinet", + "Christmas stocking", + "church", + "cinema", + "cleaver", + "cliff dwelling", + "cloak", + "clog", + "cocktail shaker", + "coffee mug", + "coffeepot", + "coil", + "combination lock", + "computer keyboard", + "confectionery", + "container ship", + "convertible", + "corkscrew", + "cornet", + "cowboy boot", + "cowboy hat", + "cradle", + "crane", + "crash helmet", + "crate", + "crib", + "Crock Pot", + "croquet ball", + "crutch", + "cuirass", + "dam", + "desk", + "desktop computer", + "dial telephone", + "diaper", + "digital clock", + "digital watch", + "dining table", + "dishrag", + "dishwasher", + "disk brake", + "dock", + "dogsled", + "dome", + "doormat", + "drilling platform", + "drum", + "drumstick", + "dumbbell", + "Dutch oven", + "electric fan", + "electric guitar", + "electric locomotive", + "entertainment center", + "envelope", + "espresso maker", + "face powder", + "feather boa", + "file", + "fireboat", + "fire engine", + "fire screen", + "flagpole", + "flute", + "folding chair", + "football helmet", + "forklift", + "fountain", + "fountain pen", + "four-poster", + "freight car", + "French horn", + "frying pan", + "fur coat", + "garbage truck", + "gasmask", + "gas pump", + "goblet", + "go-kart", + "golf ball", + "golfcart", + "gondola", + "gong", + "gown", + "grand piano", + "greenhouse", + "grille", + "grocery store", + "guillotine", + "hair slide", + "hair spray", + "half track", + "hammer", + "hamper", + "hand blower", + "hand-held computer", + "handkerchief", + "hard disc", + "harmonica", + "harp", + "harvester", + "hatchet", + "holster", + "home theater", + "honeycomb", + "hook", + "hoopskirt", + "horizontal bar", + "horse cart", + "hourglass", + "iPod", + "iron", + "jack-o'-lantern", + "jean", + "jeep", + "jersey", + "jigsaw puzzle", + "jinrikisha", + 
"joystick", + "kimono", + "knee pad", + "knot", + "lab coat", + "ladle", + "lampshade", + "laptop", + "lawn mower", + "lens cap", + "letter opener", + "library", + "lifeboat", + "lighter", + "limousine", + "liner", + "lipstick", + "Loafer", + "lotion", + "loudspeaker", + "loupe", + "lumbermill", + "magnetic compass", + "mailbag", + "mailbox", + "maillot", + "maillot", + "manhole cover", + "maraca", + "marimba", + "mask", + "matchstick", + "maypole", + "maze", + "measuring cup", + "medicine chest", + "megalith", + "microphone", + "microwave", + "military uniform", + "milk can", + "minibus", + "miniskirt", + "minivan", + "missile", + "mitten", + "mixing bowl", + "mobile home", + "Model T", + "modem", + "monastery", + "monitor", + "moped", + "mortar", + "mortarboard", + "mosque", + "mosquito net", + "motor scooter", + "mountain bike", + "mountain tent", + "mouse", + "mousetrap", + "moving van", + "muzzle", + "nail", + "neck brace", + "necklace", + "nipple", + "notebook", + "obelisk", + "oboe", + "ocarina", + "odometer", + "oil filter", + "organ", + "oscilloscope", + "overskirt", + "oxcart", + "oxygen mask", + "packet", + "paddle", + "paddlewheel", + "padlock", + "paintbrush", + "pajama", + "palace", + "panpipe", + "paper towel", + "parachute", + "parallel bars", + "park bench", + "parking meter", + "passenger car", + "patio", + "pay-phone", + "pedestal", + "pencil box", + "pencil sharpener", + "perfume", + "Petri dish", + "photocopier", + "pick", + "pickelhaube", + "picket fence", + "pickup", + "pier", + "piggy bank", + "pill bottle", + "pillow", + "ping-pong ball", + "pinwheel", + "pirate", + "pitcher", + "plane", + "planetarium", + "plastic bag", + "plate rack", + "plow", + "plunger", + "Polaroid camera", + "pole", + "police van", + "poncho", + "pool table", + "pop bottle", + "pot", + "potter's wheel", + "power drill", + "prayer rug", + "printer", + "prison", + "projectile", + "projector", + "puck", + "punching bag", + "purse", + "quill", + "quilt", + "racer", + 
"racket", + "radiator", + "radio", + "radio telescope", + "rain barrel", + "recreational vehicle", + "reel", + "reflex camera", + "refrigerator", + "remote control", + "restaurant", + "revolver", + "rifle", + "rocking chair", + "rotisserie", + "rubber eraser", + "rugby ball", + "rule", + "running shoe", + "safe", + "safety pin", + "saltshaker", + "sandal", + "sarong", + "sax", + "scabbard", + "scale", + "school bus", + "schooner", + "scoreboard", + "screen", + "screw", + "screwdriver", + "seat belt", + "sewing machine", + "shield", + "shoe shop", + "shoji", + "shopping basket", + "shopping cart", + "shovel", + "shower cap", + "shower curtain", + "ski", + "ski mask", + "sleeping bag", + "slide rule", + "sliding door", + "slot", + "snorkel", + "snowmobile", + "snowplow", + "soap dispenser", + "soccer ball", + "sock", + "solar dish", + "sombrero", + "soup bowl", + "space bar", + "space heater", + "space shuttle", + "spatula", + "speedboat", + "spider web", + "spindle", + "sports car", + "spotlight", + "stage", + "steam locomotive", + "steel arch bridge", + "steel drum", + "stethoscope", + "stole", + "stone wall", + "stopwatch", + "stove", + "strainer", + "streetcar", + "stretcher", + "studio couch", + "stupa", + "submarine", + "suit", + "sundial", + "sunglass", + "sunglasses", + "sunscreen", + "suspension bridge", + "swab", + "sweatshirt", + "swimming trunks", + "swing", + "switch", + "syringe", + "table lamp", + "tank", + "tape player", + "teapot", + "teddy", + "television", + "tennis ball", + "thatch", + "theater curtain", + "thimble", + "thresher", + "throne", + "tile roof", + "toaster", + "tobacco shop", + "toilet seat", + "torch", + "totem pole", + "tow truck", + "toyshop", + "tractor", + "trailer truck", + "tray", + "trench coat", + "tricycle", + "trimaran", + "tripod", + "triumphal arch", + "trolleybus", + "trombone", + "tub", + "turnstile", + "typewriter keyboard", + "umbrella", + "unicycle", + "upright", + "vacuum", + "vase", + "vault", + "velvet", + "vending 
machine", + "vestment", + "viaduct", + "violin", + "volleyball", + "waffle iron", + "wall clock", + "wallet", + "wardrobe", + "warplane", + "washbasin", + "washer", + "water bottle", + "water jug", + "water tower", + "whiskey jug", + "whistle", + "wig", + "window screen", + "window shade", + "Windsor tie", + "wine bottle", + "wing", + "wok", + "wooden spoon", + "wool", + "worm fence", + "wreck", + "yawl", + "yurt", + "web site", + "comic book", + "crossword puzzle", + "street sign", + "traffic light", + "book jacket", + "menu", + "plate", + "guacamole", + "consomme", + "hot pot", + "trifle", + "ice cream", + "ice lolly", + "French loaf", + "bagel", + "pretzel", + "cheeseburger", + "hotdog", + "mashed potato", + "head cabbage", + "broccoli", + "cauliflower", + "zucchini", + "spaghetti squash", + "acorn squash", + "butternut squash", + "cucumber", + "artichoke", + "bell pepper", + "cardoon", + "mushroom", + "Granny Smith", + "strawberry", + "orange", + "lemon", + "fig", + "pineapple", + "banana", + "jackfruit", + "custard apple", + "pomegranate", + "hay", + "carbonara", + "chocolate sauce", + "dough", + "meat loaf", + "pizza", + "potpie", + "burrito", + "red wine", + "espresso", + "cup", + "eggnog", + "alp", + "bubble", + "cliff", + "coral reef", + "geyser", + "lakeside", + "promontory", + "sandbar", + "seashore", + "valley", + "volcano", + "ballplayer", + "groom", + "scuba diver", + "rapeseed", + "daisy", + "yellow lady's slipper", + "corn", + "acorn", + "hip", + "buckeye", + "coral fungus", + "agaric", + "gyromitra", + "stinkhorn", + "earthstar", + "hen-of-the-woods", + "bolete", + "ear", + "toilet tissue" +] diff --git a/MLOps/serving_patterns/asynchronous_pattern/imagenet_inception_v3/extract_inception_v3.py b/MLOps/serving_patterns/asynchronous_pattern/imagenet_inception_v3/extract_inception_v3.py new file mode 100644 index 0000000..29929ae --- /dev/null +++ b/MLOps/serving_patterns/asynchronous_pattern/imagenet_inception_v3/extract_inception_v3.py @@ -0,0 +1,63 
@@ +import json +from typing import Dict, List + +import tensorflow as tf +import tensorflow_hub as hub +from tensorflow import keras + + +def get_label(json_path: str = "./data/image_net_labels.json") -> List[str]: + with open(json_path, "r") as f: + labels = json.load(f) + return labels + + +def load_hub_model() -> tf.keras.Model: + model = tf.keras.Sequential([hub.KerasLayer("https://tfhub.dev/google/imagenet/inception_v3/classification/4")]) + model.build([None, 299, 299, 3]) + return model + + +class InceptionV3Model(tf.keras.Model): + def __init__(self, model: tf.keras.Model, labels: List[str]): + super().__init__(self) + self.model = model + self.labels = labels + + @tf.function(input_signature=[tf.TensorSpec(shape=[None], dtype=tf.string, name="image")]) + def serving_fn(self, input_img: str) -> tf.Tensor: + def _base64_to_array(img): + img = tf.io.decode_base64(img) + img = tf.io.decode_jpeg(img) + img = tf.image.convert_image_dtype(img, tf.float32) + img = tf.image.resize(img, (299, 299)) + img = tf.reshape(img, (299, 299, 3)) + return img + + img = tf.map_fn(_base64_to_array, input_img, dtype=tf.float32) + predictions = self.model(img) + + def _convert_to_label(predictions): + max_prob = tf.math.reduce_max(predictions) + idx = tf.where(tf.equal(predictions, max_prob)) + label = tf.squeeze(tf.gather(self.labels, idx)) + return label + + return tf.map_fn(_convert_to_label, predictions, dtype=tf.string) + + def save(self, export_path="./saved_model/inception_v3/"): + signatures = {"serving_default": self.serving_fn} + tf.keras.backend.set_learning_phase(0) + tf.saved_model.save(self, export_path, signatures=signatures) + + +def main(): + labels = get_label(json_path="./data/image_net_labels.json") + inception_v3_hub_model = load_hub_model() + inception_v3_model = InceptionV3Model(model=inception_v3_hub_model, labels=labels) + version_number = 0 + inception_v3_model.save(export_path=f"./saved_model/inception_v3/{version_number}") + + +if __name__ == 
"__main__": + main() diff --git a/MLOps/serving_patterns/asynchronous_pattern/imagenet_inception_v3/requirements.txt b/MLOps/serving_patterns/asynchronous_pattern/imagenet_inception_v3/requirements.txt new file mode 100644 index 0000000..e2a5e51 --- /dev/null +++ b/MLOps/serving_patterns/asynchronous_pattern/imagenet_inception_v3/requirements.txt @@ -0,0 +1,11 @@ +importlib-metadata>=1.7.0 +joblib>=0.15.1 +numpy>=1.18.5 +Pillow>=8.3.2 +psutil==5.7.0 +pydantic>=1.8.2 +PyYAML>=5.3.1 +tensorflow-serving-api>=2.5.2 +tensorflow>=2.5.1 +tensorflow-hub>=0.10.0 +typing>=3.7.4.1 diff --git a/MLOps/serving_patterns/asynchronous_pattern/imagenet_inception_v3/tf_serving_entrypoint.sh b/MLOps/serving_patterns/asynchronous_pattern/imagenet_inception_v3/tf_serving_entrypoint.sh new file mode 100644 index 0000000..275b958 --- /dev/null +++ b/MLOps/serving_patterns/asynchronous_pattern/imagenet_inception_v3/tf_serving_entrypoint.sh @@ -0,0 +1,14 @@ +#!/bin/bash + +set -eu + +PORT=${PORT:-8500} +REST_API_PORT=${REST_API_PORT:-8501} +MODEL_NAME=${MODEL_NAME:-"inception_v3"} +MODEL_BASE_PATH=${MODEL_BASE_PATH:-"/asynchronous_pattern/saved_model/${MODEL_NAME}"} + +tensorflow_model_server \ + --port=${PORT} \ + --rest_api_port=${REST_API_PORT} \ + --model_name=${MODEL_NAME} \ + --model_base_path=${MODEL_BASE_PATH} \ No newline at end of file diff --git a/MLOps/serving_patterns/asynchronous_pattern/makefile b/MLOps/serving_patterns/asynchronous_pattern/makefile new file mode 100644 index 0000000..7f582e2 --- /dev/null +++ b/MLOps/serving_patterns/asynchronous_pattern/makefile @@ -0,0 +1,76 @@ +DOCKER_REPOSITORY := shibui/ml-system-in-actions + +ABSOLUTE_PATH := $(shell pwd) + +DOCKERFILE := Dockerfile +DOCKER_COMPOSE := docker-compose.yml +IMAGE_VERSION := 0.0.1 + +ASYNCHRONOUS_PATTERN := asynchronous_pattern +ASYNCHRONOUS_PATTERN_PROXY := asynchronous_proxy +ASYNCHRONOUS_PATTERN_PROXY_PORT := 8000 +ASYNCHRONOUS_PATTERN_SERVER := imagenet_inception_v3 +ASYNCHRONOUS_PATTERN_GRPC_PORT := 
8500 +ASYNCHRONOUS_PATTERN_REST_PORT := 8501 +ASYNCHRONOUS_PATTERN_BACKEND := asynchronous_backend + +.PHONY: build_proxy +build_proxy: + docker build \ + -t $(DOCKER_REPOSITORY):$(ASYNCHRONOUS_PATTERN)_$(ASYNCHRONOUS_PATTERN_PROXY)_$(IMAGE_VERSION) \ + -f ./$(DOCKERFILE).proxy . + +.PHONY: push_proxy +push_proxy: + docker push $(DOCKER_REPOSITORY):$(ASYNCHRONOUS_PATTERN)_$(ASYNCHRONOUS_PATTERN_PROXY)_$(IMAGE_VERSION) + +.PHONY: build_server +build_server: + docker build \ + -t $(DOCKER_REPOSITORY):$(ASYNCHRONOUS_PATTERN)_$(ASYNCHRONOUS_PATTERN_SERVER)_$(IMAGE_VERSION) \ + -f ./$(ASYNCHRONOUS_PATTERN_SERVER)/$(DOCKERFILE) . + +.PHONY: push_server +push_server: + docker push $(DOCKER_REPOSITORY):$(ASYNCHRONOUS_PATTERN)_$(ASYNCHRONOUS_PATTERN_SERVER)_$(IMAGE_VERSION) + +.PHONY: build_backend +build_backend: + docker build \ + -t $(DOCKER_REPOSITORY):$(ASYNCHRONOUS_PATTERN)_$(ASYNCHRONOUS_PATTERN_BACKEND)_$(IMAGE_VERSION) \ + -f ./$(DOCKERFILE).backend . + +.PHONY: run_backend +run_backend: + docker run \ + -d \ + --name $(ASYNCHRONOUS_PATTERN_BACKEND) \ + $(DOCKER_REPOSITORY):$(ASYNCHRONOUS_PATTERN)_$(ASYNCHRONOUS_PATTERN_BACKEND)_$(IMAGE_VERSION) + +.PHONY: push_backend +push_backend: + docker push $(DOCKER_REPOSITORY):$(ASYNCHRONOUS_PATTERN)_$(ASYNCHRONOUS_PATTERN_BACKEND)_$(IMAGE_VERSION) + +.PHONY: build_all +build_all: build_proxy build_server build_backend + +.PHONY: run_all +run_all: run_proxy run_server run_backend + +.PHONY: push_all +push_all: push_proxy push_server push_backend + +.PHONY: c_build +c_build: build_all + +.PHONY: c_up +c_up: + docker-compose \ + -f ./$(DOCKER_COMPOSE) \ + up -d + +.PHONY: c_down +c_down: + docker-compose \ + -f ./$(DOCKER_COMPOSE) \ + down diff --git a/MLOps/serving_patterns/asynchronous_pattern/requirements_backend.txt b/MLOps/serving_patterns/asynchronous_pattern/requirements_backend.txt new file mode 100644 index 0000000..ca3e2eb --- /dev/null +++ b/MLOps/serving_patterns/asynchronous_pattern/requirements_backend.txt @@ 
-0,0 +1,21 @@ +importlib-metadata>=1.7.0 +numpy>=1.18.5 +psutil>=5.7.0 +typing>=3.7.4.1 +click>=7.1.2 +tensorflow>=2.5.1 +tensorflow-serving-api>=2.5.1 +future>=0.18.2 +importlib-metadata>=1.7.0 +joblib>=0.15.1 +numpy>=1.18.5 +Pillow>=8.3.2 +psutil>=5.7.0 +pydantic>=1.8.2 +PyYAML>=5.3.1 +redis>=3.5.3 +typing>=3.7.4.1 +httptools>=0.1.1 +python-json-logger>=2.0.1 +loguru>=0.5.3 +requests>=2.25.1 \ No newline at end of file diff --git a/MLOps/serving_patterns/asynchronous_pattern/requirements_proxy.txt b/MLOps/serving_patterns/asynchronous_pattern/requirements_proxy.txt new file mode 100644 index 0000000..0d079a5 --- /dev/null +++ b/MLOps/serving_patterns/asynchronous_pattern/requirements_proxy.txt @@ -0,0 +1,23 @@ +fastapi>=0.65.2 +future>=0.18.2 +gunicorn>=20.0.4 +importlib-metadata>=1.7.0 +joblib>=0.15.1 +numpy>=1.18.5 +onnx>=1.7.0 +onnxruntime>=1.4.0 +Pillow>=8.3.2 +psutil>=5.7.0 +pydantic>=1.8.2 +PyYAML>=5.3.1 +redis>=3.5.3 +scikit-learn>=0.23.1 +skl2onnx>=1.7.0 +starlette>=0.13.4 +typing>=3.7.4.1 +uvicorn>=0.11.7 +uvloop>=0.14.0 +httptools>=0.1.1 +python-json-logger>=2.0.1 +loguru>=0.5.3 +requests>=2.25.1 \ No newline at end of file diff --git a/MLOps/serving_patterns/asynchronous_pattern/run.sh b/MLOps/serving_patterns/asynchronous_pattern/run.sh new file mode 100644 index 0000000..9621e17 --- /dev/null +++ b/MLOps/serving_patterns/asynchronous_pattern/run.sh @@ -0,0 +1,27 @@ +#!/bin/bash + +set -eu + +HOST=${HOST:-"0.0.0.0"} +PORT=${PORT:-8000} +WORKERS=${WORKERS:-4} +UVICORN_WORKER=${UVICORN_WORKER:-"uvicorn.workers.UvicornWorker"} +LOGLEVEL=${LOGLEVEL:-"debug"} +LOGCONFIG=${LOGCONFIG:-"./src/utils/logging.conf"} +BACKLOG=${BACKLOG:-2048} +LIMIT_MAX_REQUESTS=${LIMIT_MAX_REQUESTS:-65536} +MAX_REQUESTS_JITTER=${MAX_REQUESTS_JITTER:-2048} +GRACEFUL_TIMEOUT=${GRACEFUL_TIMEOUT:-10} +APP_NAME=${APP_NAME:-"src.app.app:app"} + +gunicorn ${APP_NAME} \ + -b ${HOST}:${PORT} \ + -w ${WORKERS} \ + -k ${UVICORN_WORKER} \ + --log-level ${LOGLEVEL} \ + --log-config 
${LOGCONFIG} \ + --backlog ${BACKLOG} \ + --max-requests ${LIMIT_MAX_REQUESTS} \ + --max-requests-jitter ${MAX_REQUESTS_JITTER} \ + --graceful-timeout ${GRACEFUL_TIMEOUT} \ + --reload diff --git a/MLOps/serving_patterns/asynchronous_pattern/src/__init__.py b/MLOps/serving_patterns/asynchronous_pattern/src/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/MLOps/serving_patterns/asynchronous_pattern/src/app/__init__.py b/MLOps/serving_patterns/asynchronous_pattern/src/app/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/MLOps/serving_patterns/asynchronous_pattern/src/app/app.py b/MLOps/serving_patterns/asynchronous_pattern/src/app/app.py new file mode 100644 index 0000000..e3902d5 --- /dev/null +++ b/MLOps/serving_patterns/asynchronous_pattern/src/app/app.py @@ -0,0 +1,16 @@ +import os +from logging import getLogger + +from fastapi import FastAPI +from src.app.routers import routers +from src.configurations import APIConfigurations + +logger = getLogger(__name__) + +app = FastAPI( + title=APIConfigurations.title, + description=APIConfigurations.description, + version=APIConfigurations.version, +) + +app.include_router(routers.router, prefix="", tags=[""]) diff --git a/MLOps/serving_patterns/asynchronous_pattern/src/app/backend/__init__.py b/MLOps/serving_patterns/asynchronous_pattern/src/app/backend/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/MLOps/serving_patterns/asynchronous_pattern/src/app/backend/background_job.py b/MLOps/serving_patterns/asynchronous_pattern/src/app/backend/background_job.py new file mode 100644 index 0000000..9946e13 --- /dev/null +++ b/MLOps/serving_patterns/asynchronous_pattern/src/app/backend/background_job.py @@ -0,0 +1,52 @@ +import logging +from typing import Any, Dict + +from fastapi import BackgroundTasks +from PIL import Image +from pydantic import BaseModel +from src.app.backend.store_data_job import left_push_queue, save_image_redis_job +from src.configurations import 
CacheConfigurations +from src.constants import CONSTANTS + +logger = logging.getLogger(__name__) + + +class SaveDataJob(BaseModel): + job_id: str + data: Any + queue_name: str = CONSTANTS.REDIS_QUEUE + is_completed: bool = False + + def __call__(self): + pass + + +class SaveDataRedisJob(SaveDataJob): + enqueue: bool = False + + def __call__(self): + save_data_jobs[self.job_id] = self + logger.info(f"registered job: {self.job_id} in {self.__class__.__name__}") + self.is_completed = save_image_redis_job(job_id=self.job_id, image=self.data) + if self.enqueue: + self.is_completed = left_push_queue(self.queue_name, self.job_id) + logger.info(f"completed save data: {self.job_id}") + + +def save_data_job( + data: Image.Image, + job_id: str, + background_tasks: BackgroundTasks, + enqueue: bool = False, +) -> str: + task = SaveDataRedisJob( + job_id=job_id, + data=data, + queue_name=CacheConfigurations.queue_name, + enqueue=enqueue, + ) + background_tasks.add_task(task) + return job_id + + +save_data_jobs: Dict[str, SaveDataJob] = {} diff --git a/MLOps/serving_patterns/asynchronous_pattern/src/app/backend/data.py b/MLOps/serving_patterns/asynchronous_pattern/src/app/backend/data.py new file mode 100644 index 0000000..b093920 --- /dev/null +++ b/MLOps/serving_patterns/asynchronous_pattern/src/app/backend/data.py @@ -0,0 +1,7 @@ +from typing import Any + +from pydantic import BaseModel + + +class Data(BaseModel): + image_data: Any diff --git a/MLOps/serving_patterns/asynchronous_pattern/src/app/backend/prediction_batch.py b/MLOps/serving_patterns/asynchronous_pattern/src/app/backend/prediction_batch.py new file mode 100644 index 0000000..b17a5a9 --- /dev/null +++ b/MLOps/serving_patterns/asynchronous_pattern/src/app/backend/prediction_batch.py @@ -0,0 +1,74 @@ +import asyncio +import base64 +import io +import os +from concurrent.futures import ProcessPoolExecutor +from logging import DEBUG, Formatter, StreamHandler, getLogger +from time import sleep + +import grpc +from 
src.app.backend import request_inception_v3, store_data_job +from src.configurations import CacheConfigurations, ModelConfigurations +from tensorflow_serving.apis import prediction_service_pb2_grpc + +log_format = Formatter("%(asctime)s %(name)s [%(levelname)s] %(message)s") +logger = getLogger("prediction_batch") +stdout_handler = StreamHandler() +stdout_handler.setFormatter(log_format) +logger.addHandler(stdout_handler) +logger.setLevel(DEBUG) + + +def _trigger_prediction_if_queue(stub: prediction_service_pb2_grpc.PredictionServiceStub): + job_id = store_data_job.right_pop_queue(CacheConfigurations.queue_name) + logger.info(f"predict job_id: {job_id}") + if job_id is not None: + data = store_data_job.get_data_redis(job_id) + if data != "": + return True + image_key = store_data_job.make_image_key(job_id) + image_data = store_data_job.get_data_redis(image_key) + decoded = base64.b64decode(image_data) + io_bytes = io.BytesIO(decoded) + prediction = request_inception_v3.request_grpc( + stub=stub, + image=io_bytes.read(), + model_spec_name=ModelConfigurations.model_spec_name, + signature_name=ModelConfigurations.signature_name, + timeout_second=5, + ) + if prediction is not None: + logger.info(f"{job_id} {prediction}") + store_data_job.set_data_redis(job_id, prediction) + else: + store_data_job.left_push_queue(CacheConfigurations.queue_name, job_id) + + +def _loop(): + serving_address = f"{ModelConfigurations.address}:{ModelConfigurations.grpc_port}" + channel = grpc.insecure_channel(serving_address) + stub = prediction_service_pb2_grpc.PredictionServiceStub(channel) + + while True: + sleep(1) + _trigger_prediction_if_queue(stub=stub) + + +def prediction_loop(num_procs: int = 2): + executor = ProcessPoolExecutor(num_procs) + loop = asyncio.get_event_loop() + + for _ in range(num_procs): + asyncio.ensure_future(loop.run_in_executor(executor, _loop)) + + loop.run_forever() + + +def main(): + NUM_PROCS = int(os.getenv("NUM_PROCS", 2)) + prediction_loop(NUM_PROCS) + + 
+if __name__ == "__main__": + logger.info("start backend") + main() diff --git a/MLOps/serving_patterns/asynchronous_pattern/src/app/backend/redis_client.py b/MLOps/serving_patterns/asynchronous_pattern/src/app/backend/redis_client.py new file mode 100644 index 0000000..1103b51 --- /dev/null +++ b/MLOps/serving_patterns/asynchronous_pattern/src/app/backend/redis_client.py @@ -0,0 +1,9 @@ +import redis +from src.configurations import RedisCacheConfigurations + +redis_client = redis.Redis( + host=RedisCacheConfigurations.cache_host, + port=RedisCacheConfigurations.cache_port, + db=RedisCacheConfigurations.redis_db, + decode_responses=RedisCacheConfigurations.redis_decode_responses, +) diff --git a/MLOps/serving_patterns/asynchronous_pattern/src/app/backend/request_inception_v3.py b/MLOps/serving_patterns/asynchronous_pattern/src/app/backend/request_inception_v3.py new file mode 100644 index 0000000..c15c811 --- /dev/null +++ b/MLOps/serving_patterns/asynchronous_pattern/src/app/backend/request_inception_v3.py @@ -0,0 +1,43 @@ +import base64 +import json + +import requests +import tensorflow as tf +from tensorflow_serving.apis import predict_pb2, prediction_service_pb2_grpc + + +def request_grpc( + stub: prediction_service_pb2_grpc.PredictionServiceStub, + image: bytes, + model_spec_name: str = "inception_v3", + signature_name: str = "serving_default", + timeout_second: int = 5, +) -> str: + request = predict_pb2.PredictRequest() + request.model_spec.name = model_spec_name + request.model_spec.signature_name = signature_name + + base64_image = base64.urlsafe_b64encode(image) + request.inputs["image"].CopyFrom(tf.make_tensor_proto([base64_image])) + response = stub.Predict(request, timeout_second) + + prediction = response.outputs["output_0"].string_val[0].decode("utf-8") + return prediction + + +def request_rest( + image: bytes, + model_spec_name: str = "inception_v3", + address: str = "localhost", + port: int = 8501, +): + serving_address = 
f"http://{address}:{port}/v1/models/{model_spec_name}:predict" + headers = {"Content-Type": "application/json"} + base64_image = base64.urlsafe_b64encode(image).decode("ascii") + request_dict = {"inputs": {"image": [base64_image]}} + response = requests.post( + serving_address, + json.dumps(request_dict), + headers=headers, + ) + return dict(response.json())["outputs"][0] diff --git a/MLOps/serving_patterns/asynchronous_pattern/src/app/backend/store_data_job.py b/MLOps/serving_patterns/asynchronous_pattern/src/app/backend/store_data_job.py new file mode 100644 index 0000000..f780714 --- /dev/null +++ b/MLOps/serving_patterns/asynchronous_pattern/src/app/backend/store_data_job.py @@ -0,0 +1,63 @@ +import base64 +import io +import json +import logging +from typing import Any, Dict + +import numpy as np +from PIL import Image +from src.app.backend.redis_client import redis_client + +logger = logging.getLogger(__name__) + + +def make_image_key(key: str) -> str: + return f"{key}_image" + + +def left_push_queue(queue_name: str, key: str) -> bool: + try: + redis_client.lpush(queue_name, key) + return True + except Exception: + return False + + +def right_pop_queue(queue_name: str) -> Any: + if redis_client.llen(queue_name) > 0: + return redis_client.rpop(queue_name) + else: + return None + + +def set_data_redis(key: str, value: str) -> bool: + redis_client.set(key, value) + return True + + +def get_data_redis(key: str) -> Any: + data = redis_client.get(key) + return data + + +def set_image_redis(key: str, image: Image.Image) -> str: + bytes_io = io.BytesIO() + image.save(bytes_io, format=image.format) + image_key = make_image_key(key) + encoded = base64.b64encode(bytes_io.getvalue()) + redis_client.set(image_key, encoded) + return image_key + + +def get_image_redis(key: str) -> Image.Image: + redis_data = get_data_redis(key) + decoded = base64.b64decode(redis_data) + io_bytes = io.BytesIO(decoded) + image = Image.open(io_bytes) + return image + + +def 
save_image_redis_job(job_id: str, image: Image.Image) -> bool: + set_image_redis(job_id, image) + redis_client.set(job_id, "") + return True diff --git a/MLOps/serving_patterns/asynchronous_pattern/src/app/routers/__init__.py b/MLOps/serving_patterns/asynchronous_pattern/src/app/routers/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/MLOps/serving_patterns/asynchronous_pattern/src/app/routers/routers.py b/MLOps/serving_patterns/asynchronous_pattern/src/app/routers/routers.py new file mode 100644 index 0000000..579d929 --- /dev/null +++ b/MLOps/serving_patterns/asynchronous_pattern/src/app/routers/routers.py @@ -0,0 +1,67 @@ +import base64 +import io +import uuid +from logging import getLogger +from typing import Any, Dict + +import requests +from fastapi import APIRouter, BackgroundTasks +from PIL import Image +from src.app.backend import background_job, store_data_job +from src.app.backend.data import Data +from src.configurations import ModelConfigurations + +logger = getLogger(__name__) +router = APIRouter() + + +@router.get("/health") +def health() -> Dict[str, str]: + return {"health": "ok"} + + +@router.get("/metadata") +def metadata() -> Dict[str, Any]: + model_spec_name = ModelConfigurations.model_spec_name + address = ModelConfigurations.address + port = ModelConfigurations.rest_port + serving_address = f"http://{address}:{port}/v1/models/{model_spec_name}/versions/0/metadata" + response = requests.get(serving_address) + return response.json() + + +@router.get("/label") +def label() -> Dict[int, str]: + return ModelConfigurations.labels + + +@router.get("/predict/test") +def predict_test(background_tasks: BackgroundTasks) -> Dict[str, str]: + job_id = str(uuid.uuid4())[:6] + data = Data() + data.image_data = ModelConfigurations.sample_image + background_job.save_data_job(data.image_data, job_id, background_tasks, True) + return {"job_id": job_id} + + +@router.post("/predict") +def predict(data: Data, background_tasks: BackgroundTasks) 
-> Dict[str, str]: + image = base64.b64decode(str(data.image_data)) + io_bytes = io.BytesIO(image) + data.image_data = Image.open(io_bytes) + job_id = str(uuid.uuid4())[:6] + background_job.save_data_job( + data=data.image_data, + job_id=job_id, + background_tasks=background_tasks, + enqueue=True, + ) + return {"job_id": job_id} + + +@router.get("/job/{job_id}") +def prediction_result(job_id: str) -> Dict[str, Dict[str, str]]: + result = {job_id: {"prediction": ""}} + data = store_data_job.get_data_redis(job_id) + result[job_id]["prediction"] = data + return result diff --git a/MLOps/serving_patterns/asynchronous_pattern/src/configurations.py b/MLOps/serving_patterns/asynchronous_pattern/src/configurations.py new file mode 100644 index 0000000..a8c40fc --- /dev/null +++ b/MLOps/serving_patterns/asynchronous_pattern/src/configurations.py @@ -0,0 +1,62 @@ +import json +import os +from logging import getLogger +from typing import List + +from PIL import Image +from src.constants import CONSTANTS, PLATFORM_ENUM + +logger = getLogger(__name__) + + +def get_label(json_path: str = "./data/image_net_labels.json") -> List[str]: + with open(json_path, "r") as f: + labels = json.load(f) + return labels + + +def read_image(image_file: str = "./data/cat.jpg") -> bytes: + return Image.open(image_file) + + +class PlatformConfigurations: + platform = os.getenv("PLATFORM", PLATFORM_ENUM.DOCKER.value) + if not PLATFORM_ENUM.has_value(platform): + raise ValueError(f"PLATFORM must be one of {[v.value for v in PLATFORM_ENUM.__members__.values()]}") + + +class CacheConfigurations: + cache_host = os.getenv("CACHE_HOST", "redis") + cache_port = int(os.getenv("CACHE_PORT", 6379)) + queue_name = os.getenv("QUEUE_NAME", "queue") + + +class RedisCacheConfigurations(CacheConfigurations): + redis_db = int(os.getenv("REDIS_DB", 0)) + redis_decode_responses = bool(os.getenv("REDIS_DECODE_RESPONSES", True)) + + +class APIConfigurations: + title = os.getenv("API_TITLE", "ServingPattern") + 
description = os.getenv("API_DESCRIPTION", "machine learning system serving patterns") + version = os.getenv("API_VERSION", "0.1") + + +class ModelConfigurations: + model_spec_name = os.getenv("MODEL_SPEC_NAME", "inception_v3") + signature_name = os.getenv("SIGNATURE_NAME", "serving_default") + address = os.getenv("API_ADDRESS", "localhost") + grpc_port = int(os.getenv("GRPC_PORT", 8500)) + rest_port = int(os.getenv("REST_API_PORT", 8501)) + label_path = os.getenv("LABEL_PATH", "./data/image_net_labels.json") + labels = get_label(json_path=label_path) + + sample_image_path = os.getenv("SAMPLE_IMAGE_PATH", "./data/cat.jpg") + sample_image = read_image(image_file=sample_image_path) + + +logger.info(f"{PlatformConfigurations.__name__}: {PlatformConfigurations.__dict__}") +logger.info(f"{CacheConfigurations.__name__}: {CacheConfigurations.__dict__}") +logger.info(f"{RedisCacheConfigurations.__name__}: {RedisCacheConfigurations.__dict__}") +logger.info(f"{APIConfigurations.__name__}: {APIConfigurations.__dict__}") +logger.info(f"{ModelConfigurations.__name__}: {ModelConfigurations.__dict__}") diff --git a/MLOps/serving_patterns/asynchronous_pattern/src/constants.py b/MLOps/serving_patterns/asynchronous_pattern/src/constants.py new file mode 100644 index 0000000..23a0075 --- /dev/null +++ b/MLOps/serving_patterns/asynchronous_pattern/src/constants.py @@ -0,0 +1,35 @@ +import enum + + +class PLATFORM_ENUM(enum.Enum): + DOCKER = "docker" + DOCKER_COMPOSE = "docker_compose" + KUBERNETES = "kubernetes" + TEST = "test" + + @staticmethod + def has_value(item): + return item in [v.value for v in PLATFORM_ENUM.__members__.values()] + + +def constant(f): + def fset(self, value): + raise TypeError + + def fget(self): + return f() + + return property(fget, fset) + + +class _Constants(object): + @constant + def REDIS_INCREMENTS(): + return "increments" + + @constant + def REDIS_QUEUE(): + return "redis_queue" + + +CONSTANTS = _Constants() diff --git 
a/MLOps/serving_patterns/asynchronous_pattern/src/utils/__init__.py b/MLOps/serving_patterns/asynchronous_pattern/src/utils/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/MLOps/serving_patterns/asynchronous_pattern/src/utils/logging.conf b/MLOps/serving_patterns/asynchronous_pattern/src/utils/logging.conf new file mode 100644 index 0000000..490b0c1 --- /dev/null +++ b/MLOps/serving_patterns/asynchronous_pattern/src/utils/logging.conf @@ -0,0 +1,78 @@ +[loggers] +keys=root, gunicorn.error, gunicorn.access, uvicorn.error, uvicorn.access + +[logger_root] +level=DEBUG +handlers=console + +[logger_gunicorn.error] +level=DEBUG +handlers=gunicorn_error_file, console +propagate=1 +qualname=gunicorn.error + +[logger_gunicorn.access] +level=INFO +handlers=gunicorn_access_file, console +propagate=1 +qualname=gunicorn.access + +[logger_uvicorn.error] +level=DEBUG +handlers=uvicorn_error_file, console +propagate=1 +qualname=uvicorn.error + +[logger_uvicorn.access] +level=INFO +handlers=uvicorn_access_file, console +propagate=1 +qualname=gunicorn.access + + +[handlers] +keys=console, gunicorn_error_file, gunicorn_access_file, uvicorn_error_file, uvicorn_access_file + +[handler_console] +class=StreamHandler +formatter=generic +args=(sys.stdout, ) + +[handler_gunicorn_error_file] +class=logging.FileHandler +formatter=generic +args=('/var/log/gunicorn_error.log', 'a') + +[handler_gunicorn_access_file] +class=logging.FileHandler +formatter=gunicorn_access +args=('/var/log/gunicorn_access.log', 'a') + +[handler_uvicorn_error_file] +class=logging.FileHandler +formatter=generic +args=('/var/log/uvicorn_error.log', 'a') + +[handler_uvicorn_access_file] +class=logging.FileHandler +formatter=uvicorn_access +args=('/var/log/uvicorn_access.log', 'a') + + +[formatters] +keys=generic, gunicorn_access, uvicorn_access + +[formatter_generic] +class=logging.Formatter +format=[%(asctime)s] [%(levelname)s] [%(process)d] [%(name)s] [%(funcName)s] [%(lineno)d] %(message)s 
+datefmt=%Y-%m-%d %H:%M:%S + +[formatter_gunicorn_access] +class=logging.Formatter +format=[%(asctime)s] %(h)s %(l)s %(u)s %(t)s %(r)s %(m)s %(U)s %(q)s %(H)s %(s)s %(b)s %(f)s %(a)s %(D)s %(p)s +datefmt=%d/%b/%Y:%H:%M:%S (%Z) + +[formatter_uvicorn_access] +class=logging.Formatter +format=[%(asctime)s] [%(levelname)s] [%(process)d] [%(name)s] [%(funcName)s] [%(lineno)d] %(message)s +datefmt=%d/%b/%Y:%H:%M:%S (%Z) \ No newline at end of file diff --git a/MLOps/serving_patterns/asynchronous_pattern/src/utils/profiler.py b/MLOps/serving_patterns/asynchronous_pattern/src/utils/profiler.py new file mode 100644 index 0000000..8f7b1b9 --- /dev/null +++ b/MLOps/serving_patterns/asynchronous_pattern/src/utils/profiler.py @@ -0,0 +1,23 @@ +import cProfile +import os +from logging import getLogger + +logger = getLogger(__name__) + + +def do_cprofile(func): + def profiled_func(*args, **kwargs): + enable_profile = int(os.getenv("PROFILE", 1)) + if enable_profile: + profile = cProfile.Profile() + try: + profile.enable() + result = func(*args, **kwargs) + profile.disable() + return result + finally: + profile.print_stats() + else: + return func(*args, **kwargs) + + return profiled_func diff --git a/MLOps/serving_patterns/batch_pattern/.dockerignore b/MLOps/serving_patterns/batch_pattern/.dockerignore new file mode 100644 index 0000000..feab7e4 --- /dev/null +++ b/MLOps/serving_patterns/batch_pattern/.dockerignore @@ -0,0 +1,12 @@ +dockerfile +Dockerfile +.dockerignore +log +tmp +*.sqlite3 +*.sqlite3-journal +__pycache__ +.pytest_cache +*.dvc +dvc.yaml +dvc.lock diff --git a/MLOps/serving_patterns/batch_pattern/.gitignore b/MLOps/serving_patterns/batch_pattern/.gitignore new file mode 100644 index 0000000..2f44b68 --- /dev/null +++ b/MLOps/serving_patterns/batch_pattern/.gitignore @@ -0,0 +1,2 @@ + +mysql/* \ No newline at end of file diff --git a/MLOps/serving_patterns/batch_pattern/Dockerfile.api b/MLOps/serving_patterns/batch_pattern/Dockerfile.api new file mode 100644 index 
0000000..39597ee --- /dev/null +++ b/MLOps/serving_patterns/batch_pattern/Dockerfile.api @@ -0,0 +1,28 @@ +FROM python:3.8-slim + +ENV PROJECT_DIR batch_pattern +WORKDIR /${PROJECT_DIR} +ADD ./requirements.txt /${PROJECT_DIR}/ +RUN apt-get -y update && \ + apt-get -y install \ + apt-utils \ + gcc \ + curl \ + libmariadb-dev \ + default-mysql-client && \ + apt-get clean && \ + rm -rf /var/lib/apt/lists/* && \ + pip install --no-cache-dir -r requirements.txt + +COPY ./src/ /${PROJECT_DIR}/src/ +COPY ./models/ /${PROJECT_DIR}/models/ + +ENV MODEL_FILEPATH /${PROJECT_DIR}/models/iris_svc.onnx +ENV LABEL_FILEPATH /${PROJECT_DIR}/models/label.json +ENV SAMPLE_DATA_PATH /${PROJECT_DIR}/models/data.json +ENV LOG_LEVEL DEBUG +ENV LOG_FORMAT TEXT + +COPY ./run.sh /${PROJECT_DIR}/run.sh +RUN chmod +x /${PROJECT_DIR}/run.sh +CMD [ "./run.sh" ] diff --git a/MLOps/serving_patterns/batch_pattern/Dockerfile.batch b/MLOps/serving_patterns/batch_pattern/Dockerfile.batch new file mode 100644 index 0000000..6c0c211 --- /dev/null +++ b/MLOps/serving_patterns/batch_pattern/Dockerfile.batch @@ -0,0 +1,27 @@ +FROM python:3.8-slim + +ENV PROJECT_DIR batch_pattern +WORKDIR /${PROJECT_DIR} +ADD ./requirements.txt /${PROJECT_DIR}/ +RUN apt-get -y update && \ + apt-get -y install \ + apt-utils \ + gcc \ + curl \ + libmariadb-dev \ + default-mysql-client && \ + apt-get clean && \ + rm -rf /var/lib/apt/lists/* && \ + pip install --no-cache-dir -r requirements.txt + +COPY ./src/ /${PROJECT_DIR}/src/ +COPY ./models/ /${PROJECT_DIR}/models/ + +ENV MODEL_FILEPATH /${PROJECT_DIR}/models/iris_svc.onnx +ENV LABEL_FILEPATH /${PROJECT_DIR}/models/label.json +ENV SAMPLE_DATA_PATH /${PROJECT_DIR}/models/data.json +ENV LOG_LEVEL DEBUG +ENV LOG_FORMAT TEXT + + +CMD [ "python", "-m", "src.task.job" ] diff --git a/MLOps/serving_patterns/batch_pattern/README.md b/MLOps/serving_patterns/batch_pattern/README.md new file mode 100644 index 0000000..2d34d11 --- /dev/null +++ 
b/MLOps/serving_patterns/batch_pattern/README.md @@ -0,0 +1,81 @@ +# 배치 추론 패턴 + +## 목적 + +배치로 추론을 실행 + +## 전제 + +- Python 3.8 이상 +- Docker +- Docker compose + +## 사용법 + +0. 현재 디렉토리 + +```sh +$ pwd +~/ml-system-in-actions/chapter4_serving_patterns/batch_pattern +``` + +1. Docker 이미지 빌드 + +```sh +$ make build_all +# 실행 커맨드 +# docker build \ +# -t shibui/ml-system-in-actions:batch_pattern_api_0.0.1 \ +# -f Dockerfile.api \ +# . +# docker build \ +# -t shibui/ml-system-in-actions:batch_pattern_batch_0.0.1 \ +# -f Dockerfile.batch \ +# . +``` + +2. Docker compose 로 각 서비스 기동 + +```sh +$ make c_up +# 실행 커맨드 +# docker-compose \ +# -f ./docker-compose.yml \ +# up -d +``` + +3. 기동한 배치 시스템 확인 + +```sh +$ docker ps -a +# 출력 +# CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES +# 08ca4db52176 shibui/ml-system-in-actions:batch_pattern_api_0.0.1 "./run.sh" 15 seconds ago Up 14 seconds 0.0.0.0:8000->8000/tcp api +# ab5f46b1e866 shibui/ml-system-in-actions:batch_pattern_batch_0.0.1 "python -m src.task.…" 15 seconds ago Up 14 seconds job +# d1d49ea39bf0 mysql:5.7 "docker-entrypoint.s…" 16 seconds ago Up 15 seconds 0.0.0.0:3306->3306/tcp, 33060/tcp mysql + + +$ docker logs job -f +# 출력 +# 2021-01-30 12:41:52,739 INFO waiting for batch to start +# 2021-01-30 12:42:52,689 INFO starting batch +# 2021-01-30 12:42:52,795 INFO predict data size: 4000 +# 2021-01-30 12:42:52,796 DEBUG prediction log: 4002 [4.3, 2.4, 3, 2.1] [0.13043216 0.5781998 0.29136813] +# 2021-01-30 12:42:52,798 DEBUG prediction log: 4003 [4.3, 2.4, 3, 2.1] [0.13043216 0.5781998 0.29136813] +# 2021-01-30 12:42:52,799 DEBUG prediction log: 4004 [4.3, 2.4, 3, 2.1] [0.13043216 0.5781998 0.29136813] +# 2021-01-30 12:42:52,800 DEBUG prediction log: 4005 [4, 3.9, 3, 6] [0.36126029 0.25740659 0.3813332 ] +# 2021-01-30 12:42:52,801 DEBUG prediction log: 4006 [4, 3.9, 3, 6] [0.36126029 0.25740659 0.3813332 ] +# 2021-01-30 12:42:52,807 DEBUG prediction log: 4007 [4, 3.9, 3, 6] [0.36126029 0.25740659 0.3813332 ] +# 2021-01-30 
12:42:52,808 DEBUG prediction log: 4001 [4.3, 2.4, 3, 2.1] [0.13043216 0.5781998 0.29136813] +# 2021-01-30 12:42:52,808 DEBUG prediction log: 4009 [6, 3.9, 5.3, 5.7] [0.35905859 0.25630024 0.3846412 ] +``` + +4. Docker compose 정지 + +```sh +$ make c_down +# 실행 커맨드 +# docker-compose \ +# -f ./docker-compose.yml \ +# down +``` diff --git a/MLOps/serving_patterns/batch_pattern/__init__.py b/MLOps/serving_patterns/batch_pattern/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/MLOps/serving_patterns/batch_pattern/docker-compose.yml b/MLOps/serving_patterns/batch_pattern/docker-compose.yml new file mode 100644 index 0000000..327517a --- /dev/null +++ b/MLOps/serving_patterns/batch_pattern/docker-compose.yml @@ -0,0 +1,50 @@ +version: "3" + +services: + mysql: + image: mysql:5.7 + container_name: mysql + ports: + - 3306:3306 + volumes: + - ./mysql/db/data:/var/lib/mysql + - ./mysql/db/initdb.d:/docker-entrypoint-initdb.d + - ./mysql/conf.d:/etc/mysql/conf.d + environment: + - MYSQL_ROOT_PASSWORD=password + - MYSQL_DATABASE=sample_db + - MYSQL_USER=user + - MYSQL_PASSWORD=password + hostname: mysql + restart: always + stdin_open: true + + api: + container_name: api + image: shibui/ml-system-in-actions:batch_pattern_api_0.0.1 + restart: always + environment: + - MYSQL_DATABASE=sample_db + - MYSQL_USER=user + - MYSQL_PASSWORD=password + - MYSQL_PORT=3306 + - MYSQL_SERVER=mysql + entrypoint: ["./run.sh"] + ports: + - "8000:8000" + depends_on: + - mysql + + job: + container_name: job + image: shibui/ml-system-in-actions:batch_pattern_batch_0.0.1 + restart: always + environment: + - MYSQL_DATABASE=sample_db + - MYSQL_USER=user + - MYSQL_PASSWORD=password + - MYSQL_PORT=3306 + - MYSQL_SERVER=mysql + entrypoint: ["python", "-m", "src.task.job"] + depends_on: + - mysql diff --git a/MLOps/serving_patterns/batch_pattern/makefile b/MLOps/serving_patterns/batch_pattern/makefile new file mode 100644 index 0000000..846d4c3 --- /dev/null +++ 
b/MLOps/serving_patterns/batch_pattern/makefile @@ -0,0 +1,71 @@ +DOCKER_REPOSITORY := shibui/ml-system-in-actions + +ABSOLUTE_PATH := $(shell pwd) + +DOCKERFILE := Dockerfile +DOCKER_COMPOSE := docker-compose.yml +IMAGE_VERSION := 0.0.1 + +BATCH_PATTERN := batch_pattern +BATCH_PATTERN_PORT := 8000 + + +.PHONY: build_api +build_api: + docker build \ + -t $(DOCKER_REPOSITORY):$(BATCH_PATTERN)_api_$(IMAGE_VERSION) \ + -f $(DOCKERFILE).api \ + . + +.PHONY: stop_api +stop_api: + docker rm -f $(BATCH_PATTERN)_api + +.PHONY: push_api +push_api: + docker push $(DOCKER_REPOSITORY):$(BATCH_PATTERN)_api_$(IMAGE_VERSION) + +.PHONY: build_batch +build_batch: + docker build \ + -t $(DOCKER_REPOSITORY):$(BATCH_PATTERN)_batch_$(IMAGE_VERSION) \ + -f $(DOCKERFILE).batch \ + . + +.PHONY: stop_batch +stop_batch: + docker rm -f $(BATCH_PATTERN)_batch + +.PHONY: push_batch +push_batch: + docker push $(DOCKER_REPOSITORY):$(BATCH_PATTERN)_batch_$(IMAGE_VERSION) + +.PHONY: build_all +build_all: build_api build_batch + + +.PHONY: push_all +push_all: push_api push_batch + +.PHONY: c_build +c_build: build_all + +.PHONY: c_up +c_up: + docker-compose \ + -f ./$(DOCKER_COMPOSE) \ + up -d + +.PHONY: c_down +c_down: + docker-compose \ + -f ./$(DOCKER_COMPOSE) \ + down + + +.PHONY: deploy +deploy: + kubectl apply -f manifests/namespace.yml + kubectl apply -f manifests/ + + diff --git a/MLOps/serving_patterns/batch_pattern/manifests/cron_jobs.yml b/MLOps/serving_patterns/batch_pattern/manifests/cron_jobs.yml new file mode 100644 index 0000000..c42484d --- /dev/null +++ b/MLOps/serving_patterns/batch_pattern/manifests/cron_jobs.yml @@ -0,0 +1,34 @@ +apiVersion: batch/v1beta1 +kind: CronJob +metadata: + name: batch-job + namespace: batch +spec: + schedule: "0 * * * *" + jobTemplate: + spec: + template: + spec: + containers: + - name: batch-job + image: shibui/ml-system-in-actions:batch_pattern_batch_0.0.1 + env: + - name: MYSQL_DATABASE + value: sample_db + - name: MYSQL_USER + value: user + - name: 
MYSQL_PASSWORD + value: password + - name: MYSQL_PORT + value: "3306" + - name: MYSQL_SERVER + value: mysql.batch.svc.cluster.local + command: + - python + - -m + - src.task.job + resources: + requests: + cpu: 1000m + memory: "1000Mi" + restartPolicy: OnFailure diff --git a/MLOps/serving_patterns/batch_pattern/manifests/deployment.yml b/MLOps/serving_patterns/batch_pattern/manifests/deployment.yml new file mode 100644 index 0000000..fc7e388 --- /dev/null +++ b/MLOps/serving_patterns/batch_pattern/manifests/deployment.yml @@ -0,0 +1,59 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: api + namespace: batch + labels: + app: api +spec: + replicas: 3 + selector: + matchLabels: + app: api + template: + metadata: + labels: + app: api + spec: + containers: + - name: api + image: shibui/ml-system-in-actions:batch_pattern_api_0.0.1 + imagePullPolicy: Always + ports: + - containerPort: 8000 + resources: + limits: + cpu: 500m + memory: "300Mi" + requests: + cpu: 500m + memory: "300Mi" + command: + - ./run.sh + env: + - name: MYSQL_DATABASE + value: sample_db + - name: MYSQL_USER + value: user + - name: MYSQL_PASSWORD + value: password + - name: MYSQL_PORT + value: "3306" + - name: MYSQL_SERVER + value: mysql.batch.svc.cluster.local + +--- +apiVersion: v1 +kind: Service +metadata: + name: api + namespace: batch + labels: + app: api +spec: + ports: + - name: rest + port: 8000 + protocol: TCP + selector: + app: api diff --git a/MLOps/serving_patterns/batch_pattern/manifests/mysql.yml b/MLOps/serving_patterns/batch_pattern/manifests/mysql.yml new file mode 100644 index 0000000..1150af8 --- /dev/null +++ b/MLOps/serving_patterns/batch_pattern/manifests/mysql.yml @@ -0,0 +1,41 @@ +apiVersion: v1 +kind: Pod +metadata: + name: mysql + namespace: batch + labels: + app: mysql +spec: + containers: + - name: mysql + image: mysql:5.7 + imagePullPolicy: Always + ports: + - containerPort: 3306 + env: + - name: MYSQL_ROOT_PASSWORD + value: password + - name: MYSQL_DATABASE + 
value: sample_db + - name: MYSQL_USER + value: user + - name: MYSQL_PASSWORD + value: password + resources: + requests: + cpu: 1000m + memory: "1000Mi" + +--- +apiVersion: v1 +kind: Service +metadata: + name: mysql + namespace: batch + labels: + app: mysql +spec: + ports: + - port: 3306 + selector: + app: mysql diff --git a/MLOps/serving_patterns/batch_pattern/manifests/namespace.yml b/MLOps/serving_patterns/batch_pattern/manifests/namespace.yml new file mode 100644 index 0000000..2e97e12 --- /dev/null +++ b/MLOps/serving_patterns/batch_pattern/manifests/namespace.yml @@ -0,0 +1,4 @@ +apiVersion: v1 +kind: Namespace +metadata: + name: batch diff --git a/MLOps/serving_patterns/batch_pattern/models/data.json b/MLOps/serving_patterns/batch_pattern/models/data.json new file mode 100644 index 0000000..ac30482 --- /dev/null +++ b/MLOps/serving_patterns/batch_pattern/models/data.json @@ -0,0 +1,1002 @@ +[ + [4.3, 2.4, 3.0, 2.1], + [4.0, 3.9, 3.0, 6.0], + [6.0, 3.9, 5.3, 5.7], + [2.7, 4.4, 2.0, 2.9], + [2.6, 1.5, 3.5, 3.8], + [5.3, 4.0, 4.7, 3.3], + [3.9, 5.6, 2.2, 4.3], + [5.1, 2.7, 2.3, 3.5], + [2.5, 4.4, 1.7, 3.4], + [3.5, 2.0, 5.8, 2.3], + [1.7, 1.7, 3.0, 1.5], + [3.7, 2.5, 2.1, 3.3], + [3.9, 3.0, 5.7, 3.0], + [3.2, 3.2, 3.3, 3.8], + [2.4, 1.6, 5.7, 3.6], + [5.7, 2.0, 4.6, 4.5], + [3.8, 2.1, 4.9, 3.3], + [1.0, 5.0, 5.0, 2.8], + [3.8, 4.0, 4.1, 5.5], + [6.0, 4.6, 4.6, 5.9], + [3.9, 5.3, 5.5, 1.5], + [4.6, 4.9, 3.9, 5.8], + [5.2, 1.6, 1.8, 4.9], + [1.4, 3.7, 1.8, 4.6], + [3.2, 5.6, 2.1, 4.9], + [2.5, 5.1, 3.5, 3.8], + [2.9, 5.4, 4.5, 4.6], + [3.0, 5.5, 5.5, 2.4], + [3.0, 3.2, 4.3, 5.0], + [2.1, 5.3, 1.5, 5.3], + [3.7, 2.3, 5.7, 1.1], + [1.1, 3.6, 1.1, 4.7], + [3.5, 3.4, 4.9, 2.1], + [2.0, 2.9, 3.3, 3.9], + [1.5, 4.2, 4.0, 1.1], + [5.0, 4.3, 2.9, 3.8], + [5.8, 1.3, 1.6, 1.6], + [5.2, 5.0, 4.4, 2.0], + [3.5, 2.6, 3.7, 5.5], + [2.4, 5.0, 1.4, 2.2], + [4.1, 4.8, 2.1, 2.6], + [2.1, 2.2, 3.0, 5.3], + [2.6, 5.6, 4.4, 4.7], + [4.1, 3.7, 5.3, 1.7], + [2.9, 4.8, 2.2, 4.9], + [1.4, 
4.7, 1.0, 5.1], + [2.3, 4.8, 3.0, 4.4], + [1.2, 2.4, 3.2, 5.6], + [4.0, 2.7, 1.7, 3.0], + [4.1, 1.2, 3.4, 4.1], + [5.2, 1.8, 3.7, 3.5], + [3.4, 1.2, 1.1, 2.8], + [3.6, 3.1, 2.9, 4.9], + [5.0, 1.9, 2.9, 2.6], + [4.3, 3.7, 2.2, 5.7], + [2.7, 2.4, 1.7, 2.2], + [4.0, 2.5, 3.2, 1.2], + [2.7, 1.8, 2.0, 4.2], + [3.2, 3.5, 3.2, 1.4], + [5.1, 3.8, 1.2, 5.1], + [2.8, 3.3, 1.4, 5.1], + [4.3, 5.2, 1.7, 1.0], + [1.0, 1.6, 3.8, 3.1], + [1.7, 5.9, 5.5, 5.2], + [5.9, 2.9, 2.4, 6.0], + [4.0, 5.7, 5.5, 1.3], + [2.9, 2.4, 2.6, 3.2], + [1.7, 1.1, 5.4, 4.9], + [2.0, 2.3, 3.2, 4.8], + [3.4, 3.5, 3.7, 2.0], + [5.6, 1.3, 5.1, 4.8], + [2.2, 4.2, 4.4, 1.6], + [4.0, 5.4, 3.5, 5.6], + [6.0, 4.1, 2.9, 1.7], + [4.1, 4.8, 1.1, 5.6], + [2.8, 3.1, 4.0, 1.3], + [1.4, 3.4, 4.9, 1.1], + [4.6, 5.8, 2.2, 4.4], + [3.6, 2.7, 3.6, 5.3], + [4.1, 6.0, 4.2, 4.5], + [4.9, 1.9, 5.8, 4.1], + [4.8, 1.2, 2.4, 1.9], + [2.0, 1.2, 1.4, 5.6], + [5.6, 1.0, 5.2, 3.9], + [3.1, 1.9, 3.3, 1.4], + [1.7, 3.8, 2.9, 1.1], + [3.2, 5.2, 4.5, 1.9], + [2.4, 1.7, 3.9, 3.6], + [3.0, 1.4, 5.8, 4.5], + [4.5, 5.1, 2.9, 5.1], + [2.9, 3.5, 3.6, 4.7], + [3.8, 1.2, 5.4, 2.4], + [3.5, 4.8, 2.6, 2.0], + [1.2, 2.6, 3.8, 1.0], + [1.4, 2.3, 3.4, 3.4], + [1.4, 5.4, 1.3, 3.3], + [4.2, 2.3, 4.6, 2.2], + [4.9, 2.4, 4.9, 4.2], + [3.1, 4.5, 3.8, 3.6], + [4.0, 3.6, 5.5, 5.7], + [5.6, 2.1, 4.6, 5.9], + [3.6, 2.3, 1.8, 3.0], + [3.4, 3.0, 2.9, 5.0], + [2.5, 3.3, 1.6, 1.3], + [4.8, 2.1, 5.7, 2.4], + [2.5, 4.6, 3.8, 4.3], + [2.5, 1.9, 2.0, 5.9], + [3.3, 2.6, 1.7, 1.2], + [2.4, 3.6, 2.3, 4.0], + [4.4, 5.5, 3.9, 2.7], + [5.3, 5.4, 3.4, 4.8], + [4.0, 2.6, 1.3, 1.3], + [1.1, 5.0, 5.7, 3.4], + [5.4, 1.6, 4.2, 5.7], + [5.6, 5.5, 2.9, 4.1], + [3.7, 4.8, 2.9, 3.8], + [5.9, 5.6, 2.8, 3.1], + [1.3, 4.6, 4.2, 3.0], + [2.5, 2.8, 5.9, 2.1], + [2.7, 3.4, 5.7, 5.7], + [2.6, 4.0, 5.5, 4.3], + [3.1, 1.4, 4.5, 3.5], + [3.7, 1.5, 1.9, 2.8], + [5.6, 5.5, 4.1, 1.8], + [2.0, 6.0, 4.7, 5.4], + [3.9, 5.4, 3.0, 1.7], + [2.3, 5.4, 5.3, 4.4], + [5.5, 1.2, 1.6, 4.1], + [3.3, 5.4, 
1.9, 2.7], + [2.5, 4.7, 4.6, 5.0], + [4.1, 5.1, 2.4, 4.7], + [3.3, 4.8, 3.8, 1.5], + [2.5, 1.2, 1.0, 5.3], + [1.7, 2.1, 3.9, 5.8], + [2.6, 4.8, 3.0, 3.5], + [2.5, 5.2, 5.7, 4.0], + [3.9, 1.0, 2.8, 5.9], + [2.5, 3.3, 4.3, 4.7], + [4.1, 4.8, 5.3, 2.0], + [1.6, 3.2, 2.9, 1.9], + [2.0, 1.7, 1.6, 4.5], + [1.5, 1.3, 2.9, 5.1], + [1.1, 5.6, 5.5, 3.7], + [3.3, 2.5, 4.8, 4.8], + [2.6, 5.2, 5.0, 3.7], + [1.3, 1.5, 4.4, 3.6], + [3.8, 5.2, 4.1, 3.1], + [5.1, 5.9, 4.9, 4.2], + [1.2, 2.6, 5.8, 2.7], + [4.4, 5.4, 1.6, 5.8], + [1.8, 5.2, 4.1, 4.8], + [2.4, 1.8, 4.0, 2.0], + [3.5, 1.1, 3.5, 2.8], + [2.3, 2.4, 1.6, 4.8], + [5.1, 1.3, 3.9, 2.1], + [3.2, 5.6, 4.4, 5.0], + [3.1, 3.9, 4.2, 2.0], + [5.3, 2.1, 1.4, 4.5], + [4.4, 2.9, 5.1, 5.4], + [2.4, 2.2, 2.8, 4.0], + [5.8, 3.5, 1.8, 1.9], + [4.3, 2.1, 1.8, 5.3], + [2.7, 1.1, 3.8, 4.4], + [2.9, 1.0, 4.7, 4.9], + [2.5, 2.3, 5.5, 4.9], + [2.2, 5.3, 2.8, 4.8], + [2.8, 4.9, 5.2, 3.3], + [1.1, 1.1, 4.0, 1.3], + [3.2, 2.0, 6.0, 3.5], + [6.0, 6.0, 4.8, 5.2], + [4.2, 5.1, 4.1, 1.1], + [2.6, 1.0, 3.3, 4.3], + [5.3, 5.8, 4.0, 5.9], + [3.6, 4.2, 5.3, 3.7], + [4.6, 5.6, 5.1, 3.6], + [4.6, 2.4, 3.4, 5.3], + [1.7, 4.1, 5.2, 4.8], + [4.9, 4.0, 2.5, 3.2], + [1.1, 5.0, 3.5, 3.4], + [4.9, 3.7, 4.8, 4.0], + [3.9, 1.7, 2.1, 4.0], + [4.7, 2.1, 2.4, 3.1], + [3.1, 5.2, 1.5, 2.3], + [4.7, 5.1, 4.5, 5.3], + [1.6, 2.2, 2.4, 2.5], + [5.2, 4.8, 3.5, 2.7], + [5.1, 1.9, 5.3, 1.6], + [2.1, 2.0, 3.9, 4.9], + [1.1, 1.4, 6.0, 3.9], + [3.8, 1.2, 2.0, 4.8], + [4.5, 4.3, 1.6, 5.8], + [1.4, 4.1, 3.0, 5.4], + [3.2, 4.8, 4.0, 2.1], + [1.4, 5.4, 4.8, 3.8], + [3.7, 2.2, 2.7, 2.2], + [4.4, 2.0, 4.5, 2.0], + [5.2, 5.8, 5.1, 4.4], + [2.0, 2.1, 4.9, 2.8], + [4.6, 2.1, 2.7, 5.9], + [2.2, 4.8, 4.9, 1.7], + [2.0, 4.0, 2.1, 1.2], + [3.6, 3.9, 5.6, 3.1], + [1.5, 1.0, 4.1, 1.7], + [3.9, 3.3, 3.0, 6.0], + [1.3, 2.7, 2.0, 3.4], + [2.2, 2.3, 2.4, 4.1], + [4.1, 1.6, 4.9, 4.9], + [5.3, 5.6, 4.3, 1.3], + [4.9, 2.2, 5.3, 5.9], + [2.7, 1.4, 3.2, 5.4], + [3.5, 1.8, 4.7, 5.0], + [4.7, 4.3, 1.7, 
2.1], + [3.3, 4.2, 4.1, 1.2], + [4.7, 1.2, 2.3, 4.8], + [4.2, 1.6, 2.4, 1.5], + [1.5, 3.3, 1.2, 4.0], + [1.9, 3.3, 5.3, 4.2], + [1.8, 2.0, 5.0, 2.6], + [4.4, 3.2, 3.2, 2.2], + [3.5, 5.9, 1.0, 2.4], + [4.2, 1.7, 1.5, 5.0], + [2.4, 3.2, 5.9, 1.6], + [3.8, 5.1, 1.3, 2.2], + [4.9, 4.4, 3.5, 2.5], + [4.7, 3.5, 4.9, 5.9], + [4.3, 5.1, 5.5, 5.5], + [5.3, 3.1, 1.3, 3.0], + [5.7, 1.6, 1.6, 5.8], + [1.9, 2.8, 4.5, 4.3], + [2.9, 5.6, 5.2, 3.4], + [5.3, 2.9, 5.4, 2.7], + [3.8, 4.5, 3.1, 3.1], + [1.9, 1.2, 4.2, 6.0], + [2.4, 1.7, 5.4, 5.9], + [4.6, 2.8, 1.8, 3.8], + [1.9, 5.0, 2.1, 3.8], + [5.7, 4.0, 1.6, 3.2], + [5.0, 5.1, 2.7, 4.6], + [4.8, 3.0, 5.7, 5.5], + [3.4, 1.1, 1.1, 5.5], + [1.4, 3.4, 1.6, 5.4], + [2.4, 2.4, 5.7, 5.1], + [1.2, 3.5, 5.7, 2.7], + [5.8, 3.0, 4.7, 4.3], + [4.4, 3.6, 5.2, 5.9], + [1.2, 4.2, 4.2, 4.0], + [5.3, 4.3, 3.8, 1.2], + [5.4, 4.0, 2.0, 4.3], + [4.7, 1.2, 2.5, 3.3], + [1.4, 3.3, 2.9, 4.3], + [2.7, 1.4, 3.8, 2.6], + [4.2, 5.8, 1.7, 5.1], + [2.1, 1.9, 5.0, 4.4], + [5.8, 3.2, 3.4, 5.6], + [1.8, 3.3, 2.5, 5.8], + [5.5, 5.2, 3.4, 5.3], + [5.2, 2.1, 3.8, 2.7], + [2.5, 5.8, 5.1, 2.3], + [1.1, 5.0, 3.3, 4.0], + [3.7, 1.9, 3.1, 5.9], + [3.6, 5.9, 1.2, 3.9], + [5.2, 3.0, 1.2, 3.0], + [4.3, 5.8, 4.5, 4.0], + [4.3, 5.7, 3.2, 1.0], + [3.6, 5.4, 4.2, 5.8], + [4.9, 5.7, 4.9, 2.7], + [3.2, 2.4, 4.5, 3.6], + [2.7, 2.3, 4.0, 5.0], + [2.8, 1.2, 1.1, 2.5], + [3.1, 2.8, 5.3, 5.9], + [4.0, 1.9, 5.5, 3.7], + [2.4, 2.3, 5.2, 4.3], + [5.0, 1.7, 1.5, 4.8], + [2.2, 2.7, 4.3, 3.2], + [1.3, 3.8, 2.7, 1.3], + [2.5, 4.3, 1.8, 5.2], + [1.4, 4.1, 1.8, 4.7], + [5.7, 2.0, 2.7, 3.2], + [1.7, 4.3, 3.6, 4.8], + [2.1, 3.7, 5.7, 4.2], + [3.2, 3.2, 1.0, 5.3], + [5.4, 1.8, 2.4, 5.9], + [5.7, 2.3, 4.3, 4.5], + [4.7, 4.1, 5.6, 1.1], + [2.0, 2.2, 4.0, 5.9], + [2.9, 3.1, 4.3, 5.4], + [2.0, 4.8, 2.4, 2.5], + [1.6, 2.5, 1.0, 1.5], + [1.9, 1.3, 1.2, 5.1], + [2.6, 1.1, 5.9, 1.0], + [4.4, 4.1, 1.6, 3.6], + [5.3, 4.3, 4.1, 2.4], + [4.6, 2.9, 2.8, 2.0], + [4.6, 1.6, 4.1, 1.4], + [3.6, 2.4, 5.0, 2.0], + 
[1.7, 1.7, 1.8, 3.6], + [1.7, 3.9, 5.6, 3.5], + [6.0, 1.0, 5.4, 3.9], + [3.4, 2.1, 1.0, 5.4], + [4.9, 2.7, 5.9, 3.1], + [3.1, 2.0, 2.4, 1.1], + [4.9, 3.4, 2.4, 3.8], + [4.3, 4.4, 2.8, 5.1], + [4.8, 4.4, 4.1, 1.6], + [5.3, 3.4, 5.7, 5.8], + [4.6, 2.4, 5.0, 5.3], + [5.6, 4.2, 5.8, 4.6], + [3.5, 2.1, 2.5, 4.9], + [3.3, 1.1, 1.4, 5.3], + [1.0, 4.8, 5.6, 3.0], + [4.1, 4.1, 4.4, 3.8], + [4.5, 2.2, 2.8, 3.7], + [2.4, 3.5, 1.7, 5.1], + [3.2, 1.5, 3.2, 5.7], + [4.8, 5.3, 2.7, 4.7], + [5.6, 1.5, 5.8, 5.6], + [1.6, 3.8, 4.2, 3.9], + [3.5, 1.4, 2.7, 1.4], + [3.6, 2.2, 3.5, 3.9], + [5.1, 1.1, 4.4, 1.5], + [2.8, 4.8, 3.6, 3.7], + [3.3, 2.7, 3.1, 5.8], + [2.2, 4.7, 5.0, 2.4], + [4.6, 2.1, 5.1, 4.7], + [3.5, 3.1, 1.7, 5.1], + [4.4, 2.8, 2.9, 3.7], + [2.5, 1.8, 2.6, 5.8], + [1.3, 2.0, 2.0, 4.2], + [5.2, 1.5, 2.1, 4.9], + [5.2, 1.3, 5.5, 5.3], + [3.6, 2.9, 1.4, 1.1], + [4.5, 6.0, 1.7, 3.3], + [4.2, 2.0, 4.0, 4.2], + [2.7, 1.9, 1.8, 2.5], + [1.5, 2.2, 1.4, 2.0], + [4.9, 4.9, 3.1, 5.9], + [4.3, 1.2, 4.1, 4.4], + [2.0, 4.2, 3.5, 3.5], + [1.0, 3.9, 2.8, 1.2], + [2.4, 1.2, 5.6, 5.7], + [4.8, 3.7, 5.9, 2.4], + [1.3, 1.1, 4.0, 5.3], + [4.2, 1.1, 4.4, 2.4], + [2.6, 3.2, 5.3, 1.3], + [3.8, 5.1, 5.0, 4.7], + [5.7, 1.8, 5.4, 2.2], + [4.9, 2.4, 5.8, 3.8], + [2.5, 5.3, 5.3, 2.1], + [5.2, 3.6, 3.5, 1.7], + [4.6, 4.2, 2.3, 5.0], + [3.6, 4.8, 2.3, 4.8], + [3.3, 5.6, 3.2, 6.0], + [3.8, 5.8, 2.8, 3.6], + [2.4, 2.4, 1.2, 3.9], + [5.2, 1.8, 5.1, 5.3], + [4.4, 1.5, 2.8, 3.0], + [4.0, 4.5, 2.8, 6.0], + [4.4, 5.0, 1.7, 1.1], + [1.7, 2.1, 4.8, 4.7], + [5.4, 1.4, 5.3, 2.3], + [5.8, 1.8, 5.5, 4.8], + [4.9, 1.8, 1.3, 3.6], + [5.0, 5.8, 3.9, 5.9], + [4.7, 4.6, 2.7, 2.0], + [2.4, 3.5, 5.4, 3.2], + [5.1, 1.8, 3.2, 4.9], + [5.5, 3.1, 4.3, 3.4], + [3.6, 5.3, 5.5, 4.5], + [2.1, 5.4, 4.6, 6.0], + [1.8, 2.8, 1.6, 4.3], + [2.5, 1.4, 1.3, 2.7], + [4.7, 5.9, 4.6, 3.5], + [2.7, 4.7, 4.2, 3.7], + [4.6, 2.6, 4.4, 5.9], + [3.7, 3.9, 3.2, 5.2], + [5.7, 5.1, 6.0, 4.5], + [4.1, 1.7, 2.9, 1.8], + [5.7, 4.7, 1.4, 2.7], + [2.9, 
2.7, 5.6, 2.3], + [3.6, 3.5, 4.4, 1.8], + [2.7, 3.4, 4.8, 1.5], + [1.5, 3.1, 1.8, 3.1], + [1.8, 5.0, 3.6, 5.3], + [3.4, 2.4, 2.5, 1.0], + [3.6, 1.2, 2.3, 4.1], + [3.2, 3.3, 4.9, 4.6], + [2.4, 2.7, 5.9, 2.8], + [3.0, 3.2, 6.0, 1.3], + [2.1, 4.4, 2.6, 3.0], + [3.9, 5.7, 2.9, 4.5], + [3.8, 2.0, 5.1, 4.5], + [4.0, 4.5, 4.0, 3.7], + [2.2, 5.2, 2.9, 4.9], + [5.9, 1.2, 3.3, 2.5], + [4.0, 3.1, 1.3, 3.5], + [4.5, 3.9, 1.5, 4.3], + [3.8, 4.6, 2.5, 5.6], + [4.4, 2.7, 2.6, 2.0], + [5.5, 2.4, 4.5, 1.9], + [2.2, 5.9, 2.3, 5.2], + [3.6, 2.0, 4.9, 3.6], + [2.8, 4.6, 4.4, 3.2], + [2.2, 5.7, 3.1, 4.0], + [1.3, 1.4, 1.9, 5.2], + [2.7, 1.5, 5.7, 2.2], + [4.7, 5.5, 2.9, 4.7], + [3.2, 3.9, 1.7, 4.3], + [4.1, 2.1, 1.4, 3.0], + [5.4, 4.5, 4.9, 4.8], + [2.6, 5.5, 5.2, 1.1], + [1.4, 4.1, 3.5, 4.5], + [1.8, 1.5, 1.6, 3.8], + [2.0, 4.1, 2.5, 3.5], + [4.8, 3.4, 3.6, 2.5], + [4.8, 3.0, 1.3, 5.3], + [3.9, 3.1, 2.9, 4.2], + [5.4, 1.1, 1.0, 3.2], + [1.8, 1.7, 5.8, 2.2], + [5.1, 4.1, 4.3, 4.5], + [2.2, 1.2, 2.8, 2.9], + [3.3, 4.9, 1.7, 1.5], + [5.9, 2.3, 3.5, 4.0], + [3.3, 1.4, 2.2, 6.0], + [4.6, 2.7, 1.8, 2.5], + [2.1, 5.3, 2.5, 2.3], + [5.4, 5.0, 4.0, 3.6], + [4.8, 2.6, 3.0, 3.0], + [4.0, 5.0, 5.3, 1.6], + [3.4, 1.3, 4.7, 4.0], + [4.7, 5.2, 5.2, 1.8], + [1.1, 3.5, 1.4, 4.5], + [2.7, 2.3, 4.8, 1.1], + [4.9, 4.0, 1.3, 1.7], + [4.8, 1.2, 3.0, 4.0], + [1.8, 1.9, 4.0, 5.4], + [4.5, 4.6, 2.5, 2.8], + [5.5, 5.9, 3.0, 2.9], + [3.1, 1.7, 5.2, 5.4], + [4.4, 1.2, 5.4, 1.9], + [4.7, 1.4, 3.2, 5.5], + [3.2, 1.4, 5.2, 2.3], + [2.0, 2.5, 4.8, 2.2], + [4.8, 5.6, 2.3, 5.5], + [4.2, 1.1, 4.7, 4.5], + [1.7, 3.2, 1.2, 1.4], + [2.2, 2.9, 3.4, 1.1], + [4.5, 2.5, 1.8, 2.4], + [2.1, 1.1, 5.5, 5.0], + [1.4, 3.8, 5.5, 4.5], + [3.8, 5.4, 3.4, 3.2], + [1.1, 2.9, 4.7, 2.8], + [2.2, 1.6, 5.9, 4.7], + [4.1, 5.4, 3.1, 1.8], + [1.6, 4.5, 3.6, 2.5], + [2.4, 5.7, 2.8, 5.3], + [5.2, 2.2, 2.3, 2.0], + [2.3, 2.2, 1.7, 5.6], + [4.5, 2.0, 3.3, 4.2], + [3.4, 1.2, 1.5, 3.2], + [2.1, 1.3, 5.7, 5.0], + [2.1, 5.3, 2.8, 4.7], + [2.8, 1.5, 
5.1, 1.4], + [4.1, 1.2, 2.7, 3.1], + [2.6, 2.3, 3.6, 1.5], + [3.9, 2.7, 1.8, 2.0], + [5.2, 4.9, 1.9, 1.4], + [5.1, 4.6, 4.5, 4.2], + [4.7, 4.3, 4.2, 5.2], + [5.3, 1.7, 4.0, 5.5], + [2.2, 5.5, 2.9, 3.0], + [5.0, 1.5, 5.5, 3.0], + [2.8, 4.8, 5.0, 4.0], + [1.6, 5.2, 5.8, 3.9], + [5.2, 1.2, 5.3, 4.8], + [5.9, 2.2, 5.5, 5.9], + [5.9, 4.3, 1.5, 3.9], + [5.7, 4.3, 2.7, 1.7], + [3.8, 1.0, 3.7, 2.4], + [3.8, 4.7, 2.8, 1.5], + [3.5, 4.3, 4.5, 4.6], + [2.1, 1.8, 2.1, 3.5], + [1.2, 4.5, 4.2, 2.5], + [5.4, 4.7, 4.1, 2.9], + [1.6, 3.1, 4.2, 3.9], + [2.3, 4.0, 2.5, 1.5], + [4.7, 5.3, 1.2, 1.6], + [3.9, 3.0, 5.7, 5.3], + [2.8, 1.2, 1.8, 1.3], + [5.9, 3.2, 2.4, 4.6], + [3.1, 3.3, 2.7, 3.6], + [2.9, 1.5, 4.2, 5.8], + [3.2, 2.5, 5.2, 3.6], + [5.9, 4.6, 2.7, 4.9], + [4.2, 2.4, 5.4, 2.7], + [3.0, 2.1, 3.6, 4.0], + [4.2, 3.4, 1.4, 6.0], + [4.7, 1.1, 1.4, 5.0], + [4.6, 5.1, 5.0, 1.0], + [2.5, 3.1, 4.0, 1.6], + [2.2, 3.0, 4.6, 5.4], + [5.9, 2.9, 5.5, 1.7], + [5.4, 4.0, 5.0, 5.8], + [6.0, 3.2, 3.2, 1.4], + [5.2, 5.9, 3.4, 2.4], + [4.9, 1.3, 3.4, 1.4], + [2.2, 5.4, 1.8, 4.2], + [1.8, 1.6, 3.3, 5.1], + [5.1, 1.3, 5.7, 2.2], + [1.5, 3.2, 4.9, 4.4], + [3.0, 3.2, 2.2, 3.9], + [6.0, 2.2, 3.7, 3.4], + [2.1, 1.8, 5.7, 3.0], + [3.7, 3.9, 4.9, 6.0], + [4.1, 5.0, 4.3, 4.1], + [4.9, 2.7, 3.2, 6.0], + [2.1, 5.1, 5.0, 4.5], + [5.5, 1.3, 2.6, 4.7], + [5.4, 2.5, 5.0, 2.2], + [5.2, 3.8, 2.9, 2.9], + [5.8, 5.2, 5.6, 1.8], + [1.3, 3.3, 5.8, 1.3], + [2.6, 1.3, 3.8, 5.4], + [1.6, 2.2, 5.7, 3.2], + [5.4, 5.1, 5.9, 5.2], + [2.6, 6.0, 3.0, 4.2], + [1.3, 4.1, 4.6, 2.6], + [1.1, 2.1, 3.9, 5.4], + [3.0, 4.5, 3.9, 5.1], + [1.2, 1.6, 5.6, 2.4], + [3.3, 2.0, 4.5, 2.4], + [3.9, 1.8, 4.7, 3.0], + [5.9, 4.2, 4.8, 2.7], + [5.6, 5.4, 3.2, 4.3], + [4.6, 1.8, 3.3, 4.2], + [4.0, 5.6, 5.6, 2.9], + [2.3, 5.2, 4.1, 1.0], + [5.4, 4.5, 4.9, 2.4], + [1.1, 4.9, 1.7, 3.1], + [1.7, 5.2, 4.3, 4.8], + [2.1, 1.3, 3.0, 1.9], + [2.9, 2.8, 4.9, 5.6], + [3.5, 2.5, 5.5, 4.9], + [2.8, 6.0, 2.4, 1.6], + [5.3, 2.1, 1.8, 2.1], + [4.2, 1.3, 5.3, 
4.4], + [1.1, 5.7, 2.1, 1.2], + [3.2, 3.2, 4.0, 4.4], + [4.6, 4.9, 3.1, 2.9], + [4.5, 3.3, 1.5, 1.5], + [5.0, 2.6, 4.6, 5.9], + [1.4, 2.9, 5.2, 5.8], + [4.5, 3.7, 1.6, 1.6], + [5.0, 3.2, 4.4, 1.1], + [6.0, 2.3, 4.9, 4.6], + [1.9, 2.4, 2.0, 4.3], + [1.2, 2.3, 4.4, 6.0], + [4.2, 4.3, 3.8, 5.5], + [5.7, 2.6, 4.7, 4.8], + [4.5, 1.0, 2.2, 2.7], + [4.8, 5.4, 5.7, 5.8], + [1.5, 5.5, 2.4, 3.3], + [5.6, 3.3, 3.0, 3.6], + [3.9, 3.2, 3.0, 3.8], + [3.2, 4.5, 1.3, 1.4], + [3.4, 2.0, 4.7, 1.5], + [2.5, 5.4, 1.7, 2.0], + [2.9, 4.2, 3.0, 4.6], + [2.4, 2.1, 1.8, 4.9], + [4.5, 4.6, 4.8, 5.3], + [4.8, 4.1, 1.8, 2.3], + [5.0, 1.4, 4.3, 3.0], + [2.4, 3.7, 3.6, 5.1], + [5.2, 3.5, 5.0, 2.6], + [2.6, 4.1, 2.3, 4.1], + [5.8, 4.2, 1.7, 1.9], + [3.3, 4.2, 2.9, 5.7], + [1.9, 6.0, 1.6, 5.9], + [4.9, 3.4, 1.8, 1.4], + [3.2, 5.8, 3.2, 4.0], + [4.1, 3.7, 6.0, 1.7], + [1.4, 3.6, 3.5, 1.4], + [5.8, 3.6, 4.2, 4.0], + [2.1, 3.4, 3.6, 4.5], + [1.0, 2.7, 1.4, 1.3], + [2.2, 3.3, 1.4, 2.3], + [4.7, 5.6, 5.2, 3.2], + [1.8, 3.6, 3.7, 2.7], + [1.8, 5.5, 1.9, 3.6], + [4.7, 3.3, 1.6, 5.3], + [2.6, 4.2, 2.6, 3.5], + [4.6, 2.2, 3.2, 3.6], + [3.5, 4.1, 3.0, 3.9], + [4.1, 5.6, 5.7, 4.6], + [1.9, 2.5, 1.2, 2.9], + [1.5, 6.0, 1.5, 3.5], + [1.2, 5.9, 5.6, 5.2], + [1.1, 5.9, 2.4, 4.8], + [3.3, 5.5, 2.2, 4.6], + [5.5, 5.8, 3.0, 1.3], + [2.1, 1.3, 1.3, 1.6], + [2.1, 1.1, 1.4, 5.6], + [4.0, 2.5, 3.5, 3.0], + [1.9, 1.3, 3.9, 5.3], + [5.5, 3.5, 3.6, 5.0], + [4.3, 4.4, 3.5, 3.4], + [5.2, 2.5, 2.1, 2.6], + [3.1, 5.7, 1.4, 2.5], + [4.1, 4.7, 2.0, 5.0], + [5.4, 4.1, 2.1, 1.5], + [1.8, 2.3, 2.1, 4.6], + [2.8, 3.3, 2.7, 3.4], + [1.5, 2.4, 3.2, 3.2], + [3.3, 4.5, 5.9, 2.5], + [5.5, 4.9, 4.1, 4.6], + [3.0, 4.6, 4.1, 2.2], + [4.3, 4.5, 3.0, 1.6], + [3.2, 1.0, 2.3, 1.2], + [1.7, 2.4, 3.2, 2.8], + [1.5, 2.3, 1.4, 4.8], + [5.6, 3.4, 1.1, 5.2], + [3.3, 5.5, 2.5, 1.2], + [2.8, 2.8, 5.9, 2.7], + [5.3, 2.9, 3.0, 3.9], + [4.8, 3.8, 2.4, 2.6], + [1.5, 5.0, 3.8, 2.6], + [4.3, 1.5, 5.7, 5.7], + [1.9, 3.8, 5.9, 3.9], + [5.1, 3.3, 3.3, 1.4], + 
[2.9, 3.3, 5.4, 2.7], + [3.6, 5.2, 2.2, 4.3], + [1.1, 4.5, 5.8, 1.5], + [2.3, 5.1, 2.8, 4.6], + [4.1, 5.7, 4.2, 1.9], + [3.2, 2.7, 3.2, 5.3], + [5.4, 3.1, 2.2, 3.8], + [5.6, 5.5, 2.9, 4.1], + [5.5, 4.6, 1.5, 3.7], + [3.1, 5.0, 1.2, 1.2], + [5.7, 5.5, 5.1, 3.2], + [4.2, 5.7, 4.2, 5.1], + [4.8, 5.6, 2.7, 1.3], + [5.3, 2.7, 5.9, 3.7], + [2.9, 1.1, 5.4, 2.2], + [2.5, 4.7, 4.1, 4.8], + [5.0, 3.4, 1.6, 2.8], + [2.2, 5.0, 4.9, 5.8], + [1.4, 4.2, 2.8, 2.1], + [3.6, 4.9, 5.3, 5.2], + [4.2, 4.9, 4.7, 6.0], + [4.7, 3.6, 2.0, 2.6], + [3.0, 5.1, 3.4, 4.9], + [5.2, 3.6, 3.3, 1.2], + [1.5, 3.7, 5.2, 3.0], + [2.1, 2.3, 4.8, 2.9], + [3.4, 1.6, 5.4, 3.0], + [5.9, 5.0, 2.3, 5.9], + [2.1, 3.0, 3.4, 3.0], + [5.2, 5.8, 1.8, 2.3], + [5.8, 5.3, 2.2, 4.7], + [6.0, 2.8, 5.1, 2.2], + [5.8, 2.3, 4.4, 3.4], + [1.2, 5.2, 5.2, 3.7], + [5.6, 3.7, 4.1, 2.0], + [3.6, 2.5, 3.1, 1.3], + [4.6, 2.3, 4.2, 3.2], + [5.6, 5.0, 1.3, 3.4], + [3.9, 4.3, 3.8, 5.6], + [1.8, 1.1, 3.4, 5.4], + [1.3, 2.9, 3.5, 4.7], + [1.8, 2.1, 4.1, 2.5], + [3.7, 4.9, 2.4, 4.1], + [4.5, 4.9, 3.5, 3.4], + [5.4, 2.3, 5.8, 1.5], + [2.6, 2.3, 3.0, 3.4], + [3.2, 3.7, 5.3, 2.8], + [4.0, 1.8, 2.8, 1.7], + [2.8, 5.1, 3.0, 1.6], + [4.9, 4.7, 4.8, 2.3], + [6.0, 4.7, 5.6, 4.8], + [4.4, 3.3, 1.2, 2.4], + [5.6, 4.0, 5.7, 4.7], + [3.2, 1.8, 3.4, 3.9], + [4.6, 4.9, 3.7, 4.5], + [2.7, 2.9, 4.5, 5.2], + [5.4, 1.4, 1.7, 5.8], + [3.6, 1.8, 2.2, 6.0], + [6.0, 4.3, 1.0, 1.1], + [5.6, 2.8, 4.5, 3.6], + [1.2, 3.4, 4.2, 2.2], + [1.5, 2.3, 1.2, 3.0], + [1.8, 5.5, 3.2, 2.9], + [1.3, 3.3, 2.0, 4.0], + [1.2, 1.1, 2.6, 3.5], + [4.7, 5.7, 4.7, 1.1], + [3.6, 4.9, 2.8, 4.9], + [3.0, 1.6, 1.2, 4.8], + [5.9, 4.7, 5.4, 5.3], + [2.1, 5.5, 4.5, 4.9], + [3.7, 2.8, 3.0, 4.1], + [5.6, 5.4, 5.1, 3.7], + [3.5, 2.2, 5.2, 1.0], + [2.4, 1.8, 3.8, 2.7], + [2.1, 2.5, 4.5, 3.7], + [2.0, 1.1, 2.0, 1.9], + [6.0, 5.7, 2.4, 3.0], + [3.6, 3.9, 1.7, 3.0], + [2.2, 2.4, 1.4, 3.8], + [4.0, 1.8, 5.9, 3.9], + [1.9, 2.9, 3.3, 4.1], + [5.3, 1.8, 1.4, 5.4], + [5.2, 3.7, 1.4, 4.2], + [2.5, 
5.4, 5.6, 5.8], + [4.1, 1.5, 1.9, 2.8], + [4.1, 1.9, 1.4, 3.6], + [1.0, 5.6, 1.5, 2.6], + [3.8, 5.5, 4.4, 4.0], + [5.5, 1.2, 5.1, 4.7], + [4.8, 2.1, 2.0, 4.4], + [5.9, 5.7, 2.0, 1.3], + [3.3, 4.9, 4.8, 6.0], + [3.3, 5.8, 1.4, 5.6], + [6.0, 5.6, 3.0, 3.6], + [1.5, 1.4, 5.0, 3.0], + [2.1, 3.2, 4.8, 1.8], + [5.2, 4.5, 2.8, 2.8], + [4.5, 5.5, 1.1, 3.7], + [4.3, 5.6, 2.2, 5.1], + [3.0, 4.1, 2.1, 1.4], + [2.9, 4.7, 4.7, 2.9], + [5.8, 2.3, 5.2, 2.8], + [3.5, 3.3, 5.3, 5.6], + [5.1, 2.9, 1.1, 5.8], + [3.1, 3.1, 3.0, 3.5], + [5.7, 1.7, 2.8, 1.2], + [5.0, 5.6, 3.0, 4.8], + [3.8, 4.5, 3.6, 3.4], + [4.1, 2.7, 4.6, 4.3], + [3.9, 5.3, 5.1, 2.0], + [1.3, 5.4, 5.5, 4.9], + [2.2, 5.8, 2.5, 1.3], + [2.7, 5.2, 2.1, 3.9], + [3.8, 4.0, 5.1, 1.3], + [5.9, 2.4, 3.9, 5.8], + [3.8, 3.6, 5.9, 2.9], + [1.4, 1.7, 2.8, 2.7], + [5.9, 3.1, 1.6, 2.1], + [2.9, 4.9, 2.4, 4.5], + [5.2, 5.5, 4.7, 5.2], + [4.1, 2.6, 5.3, 5.8], + [4.1, 3.9, 2.3, 1.3], + [2.0, 4.9, 5.0, 1.5], + [2.0, 2.5, 2.1, 4.7], + [4.2, 6.0, 3.3, 4.0], + [3.5, 5.6, 5.1, 5.8], + [5.6, 2.2, 1.9, 6.0], + [4.5, 4.6, 5.3, 3.7], + [3.9, 1.4, 3.9, 3.6], + [5.5, 3.1, 1.7, 1.7], + [1.8, 2.9, 1.8, 3.5], + [5.4, 6.0, 2.9, 1.9], + [3.1, 4.2, 1.4, 4.6], + [2.9, 4.9, 1.8, 3.6], + [3.8, 5.7, 1.6, 2.3], + [6.0, 5.4, 5.9, 1.9], + [3.6, 3.8, 5.1, 4.9], + [2.2, 3.4, 4.1, 3.2], + [3.6, 5.2, 4.1, 3.1], + [5.8, 1.2, 5.3, 1.7], + [3.5, 4.8, 2.6, 4.7], + [5.3, 4.5, 5.5, 5.9], + [2.9, 2.8, 3.1, 3.7], + [1.1, 4.9, 4.5, 1.2], + [4.7, 1.1, 1.2, 1.3], + [5.8, 4.8, 1.9, 4.1], + [2.7, 5.5, 3.2, 4.7], + [4.2, 4.8, 1.5, 3.1], + [3.5, 3.1, 4.2, 1.2], + [3.6, 4.9, 5.9, 1.4], + [1.5, 2.2, 1.8, 2.3], + [4.0, 5.3, 1.8, 1.7], + [2.9, 5.0, 5.2, 1.3], + [1.7, 5.0, 2.7, 3.5], + [3.7, 3.2, 5.1, 2.6], + [3.1, 4.1, 4.3, 1.7], + [2.2, 5.1, 3.7, 1.4], + [5.6, 5.9, 2.1, 3.4], + [4.2, 1.4, 1.5, 1.1], + [1.5, 3.0, 1.3, 5.4], + [2.5, 1.2, 1.9, 1.3], + [1.1, 2.4, 2.8, 2.8], + [2.4, 3.1, 4.8, 3.1], + [4.5, 3.0, 3.4, 1.1], + [2.7, 2.7, 2.6, 4.9], + [3.9, 4.7, 1.1, 4.9], + [2.2, 4.3, 
6.0, 2.1], + [4.5, 1.4, 3.6, 4.7], + [5.7, 3.2, 5.3, 4.2], + [2.7, 1.5, 3.3, 4.9], + [2.0, 2.9, 2.8, 4.8], + [5.6, 2.4, 5.5, 2.5], + [3.0, 4.6, 3.4, 1.5], + [4.8, 1.8, 4.5, 5.2], + [2.8, 4.4, 4.3, 1.4], + [2.8, 5.0, 2.1, 1.7], + [2.5, 3.6, 1.7, 4.4], + [1.8, 5.6, 1.6, 1.4], + [1.8, 2.6, 5.4, 2.2], + [2.7, 4.4, 3.6, 3.6], + [3.7, 1.6, 2.0, 3.1], + [4.1, 5.1, 3.1, 5.4], + [2.5, 3.2, 4.8, 4.0], + [1.0, 5.2, 1.7, 3.4], + [1.8, 2.8, 2.9, 4.9], + [1.8, 1.6, 1.1, 3.1], + [5.4, 6.0, 5.6, 1.5], + [4.8, 5.0, 3.0, 4.2], + [2.5, 3.6, 3.3, 4.9], + [1.6, 3.3, 5.5, 5.2], + [3.2, 1.1, 2.4, 1.5], + [2.3, 4.2, 4.3, 2.3], + [2.8, 5.7, 4.9, 5.6], + [5.7, 5.0, 5.7, 1.3], + [4.7, 3.8, 5.4, 3.2], + [5.2, 5.9, 5.5, 4.4], + [2.9, 2.9, 1.3, 4.7], + [2.2, 5.1, 5.6, 3.7], + [2.7, 4.4, 2.4, 3.2], + [4.7, 5.1, 4.6, 5.3], + [4.9, 5.1, 1.2, 3.4], + [4.7, 3.7, 1.3, 5.2], + [2.8, 4.9, 1.5, 1.4], + [2.6, 5.9, 2.1, 1.5], + [1.2, 5.0, 5.1, 3.5], + [5.2, 2.7, 4.5, 3.2], + [5.0, 3.3, 5.3, 1.4], + [2.0, 2.9, 2.9, 2.3], + [2.0, 2.5, 3.4, 1.3], + [3.2, 5.8, 3.4, 4.0], + [2.3, 2.6, 1.9, 1.5], + [4.3, 4.9, 3.2, 1.4], + [3.5, 1.6, 4.4, 4.5], + [3.2, 2.2, 4.2, 1.9], + [3.5, 4.2, 1.8, 4.9], + [1.5, 2.9, 4.6, 4.2], + [5.2, 4.9, 2.0, 1.4], + [5.0, 2.0, 1.6, 4.8], + [1.6, 5.0, 1.1, 5.1], + [4.2, 2.5, 5.1, 4.8], + [3.6, 3.6, 1.9, 2.6], + [1.4, 4.3, 1.5, 3.4], + [3.3, 4.2, 2.8, 6.0], + [4.0, 2.1, 3.6, 2.5], + [3.0, 5.6, 5.4, 2.1], + [3.0, 4.9, 1.8, 4.4], + [4.7, 5.1, 2.7, 3.4], + [4.3, 5.9, 1.9, 4.2], + [3.2, 3.8, 3.0, 2.8], + [2.5, 2.4, 2.2, 3.2], + [4.9, 5.0, 5.4, 5.2], + [5.4, 1.9, 4.6, 2.0], + [3.2, 5.2, 2.9, 2.4], + [3.6, 1.4, 3.2, 2.0], + [3.4, 2.0, 3.0, 1.0], + [5.2, 2.0, 1.8, 4.0], + [5.6, 4.3, 4.5, 5.2], + [3.5, 2.4, 2.4, 1.2], + [1.4, 5.3, 5.3, 5.3], + [2.8, 3.5, 2.4, 2.0], + [6.0, 3.9, 2.6, 5.4], + [3.9, 4.3, 5.0, 2.0], + [2.4, 4.6, 3.1, 5.3], + [3.1, 2.8, 5.0, 1.0], + [5.0, 2.9, 1.5, 5.1], + [1.5, 5.9, 1.1, 4.8], + [4.0, 2.4, 5.2, 5.1], + [3.2, 2.7, 4.9, 3.8], + [3.5, 2.3, 4.7, 4.2], + [4.4, 3.1, 4.0, 
4.8], + [4.8, 5.7, 3.8, 4.5], + [2.2, 3.0, 4.5, 1.9], + [4.3, 2.5, 5.8, 5.0], + [5.8, 4.8, 3.7, 2.0], + [3.2, 2.9, 4.8, 1.7], + [5.3, 2.3, 5.3, 4.0], + [3.7, 1.6, 2.3, 6.0], + [1.4, 4.8, 4.5, 2.4], + [5.5, 6.0, 4.6, 2.9], + [5.9, 3.4, 1.6, 5.3], + [4.9, 4.7, 1.7, 3.6], + [3.6, 4.2, 5.5, 3.7], + [3.3, 5.5, 1.9, 4.4], + [2.9, 4.7, 1.5, 1.9], + [2.3, 1.7, 3.8, 3.0], + [4.2, 5.7, 2.6, 3.5], + [2.6, 5.0, 3.0, 2.5], + [2.5, 5.5, 5.8, 2.3], + [1.9, 2.6, 4.5, 5.9], + [1.8, 1.4, 1.8, 1.9], + [1.2, 4.5, 5.2, 2.1], + [3.7, 5.9, 4.6, 5.4], + [5.9, 5.8, 4.3, 1.4], + [3.2, 3.6, 1.8, 3.4], + [2.9, 4.8, 3.8, 4.0], + [4.9, 5.1, 2.2, 3.2], + [3.1, 2.9, 3.5, 4.8], + [5.7, 2.0, 4.8, 1.0], + [3.1, 4.3, 5.0, 5.8], + [1.9, 3.2, 2.4, 2.3], + [4.9, 1.3, 2.9, 3.7], + [3.1, 3.8, 5.9, 2.4], + [4.5, 3.7, 1.9, 3.6], + [3.1, 5.5, 2.1, 3.4], + [5.8, 3.1, 3.0, 5.3], + [4.8, 4.2, 2.3, 4.9], + [3.3, 5.8, 2.3, 1.7], + [2.6, 4.5, 3.2, 5.7], + [2.6, 4.6, 3.1, 3.2], + [3.5, 1.3, 5.4, 3.4], + [5.2, 2.5, 5.3, 5.3], + [3.5, 1.4, 3.0, 2.6], + [2.9, 1.4, 3.2, 2.0], + [4.6, 3.6, 3.2, 5.2], + [4.8, 3.8, 2.5, 3.3], + [3.8, 3.0, 5.5, 3.0], + [6.0, 4.0, 3.0, 4.0], + [5.4, 2.1, 5.0, 3.1], + [2.3, 5.6, 1.5, 5.4], + [5.3, 4.0, 3.4, 2.6], + [4.8, 3.4, 2.5, 5.6], + [4.7, 3.4, 3.6, 5.9], + [3.9, 2.8, 4.1, 3.4], + [3.7, 1.6, 5.6, 1.3], + [3.1, 4.4, 4.8, 3.9], + [1.0, 4.4, 5.3, 1.3], + [1.2, 2.3, 1.1, 3.5], + [2.5, 3.4, 3.6, 5.7], + [3.6, 1.0, 3.4, 2.6], + [3.9, 5.2, 1.7, 5.8], + [3.6, 4.0, 4.2, 1.3], + [4.8, 1.2, 2.2, 5.9], + [4.2, 5.3, 3.1, 3.6], + [4.6, 1.3, 4.4, 4.3], + [3.3, 1.7, 5.0, 5.3], + [1.4, 2.8, 1.9, 1.8], + [3.0, 5.2, 4.0, 3.3], + [4.6, 4.1, 1.3, 2.5], + [2.2, 1.7, 4.5, 4.7], + [2.7, 5.2, 3.8, 4.7], + [1.7, 4.3, 3.5, 3.9], + [5.2, 5.1, 2.2, 1.4], + [4.9, 1.8, 2.0, 2.8], + [5.8, 3.3, 3.2, 4.6], + [4.6, 4.2, 3.2, 5.6], + [1.9, 1.1, 2.0, 4.7], + [3.9, 4.5, 4.9, 4.8], + [2.0, 4.6, 1.9, 2.0], + [4.8, 3.3, 1.6, 1.9], + [4.5, 5.5, 1.5, 5.3], + [1.7, 2.7, 4.6, 2.0], + [5.0, 4.6, 5.3, 2.7], + [5.7, 3.0, 4.1, 5.1], + 
[3.9, 1.9, 2.6, 1.5], + [3.5, 4.1, 2.5, 2.5], + [1.8, 3.0, 2.0, 4.2], + [2.8, 4.7, 2.9, 1.8], + [5.5, 4.5, 5.0, 4.5], + [2.0, 2.2, 2.9, 1.6], + [4.2, 1.0, 4.2, 2.6], + [5.3, 4.0, 1.3, 1.1], + [1.8, 1.7, 3.1, 4.4], + [4.3, 1.0, 2.9, 1.2], + [5.1, 2.6, 2.1, 2.2], + [5.2, 5.4, 4.5, 3.3], + [6.0, 3.1, 5.8, 5.3], + [3.4, 1.4, 1.1, 3.5], + [4.1, 5.2, 1.2, 3.2], + [4.6, 3.3, 3.6, 4.1], + [2.1, 4.3, 3.0, 4.2], + [2.5, 1.8, 5.1, 2.1], + [3.7, 3.1, 4.7, 4.4], + [4.4, 1.3, 5.9, 3.2], + [4.3, 2.2, 4.4, 2.8], + [2.8, 1.7, 4.3, 1.5], + [3.5, 1.7, 5.6, 4.6], + [1.6, 1.6, 4.9, 5.9], + [2.6, 2.9, 1.2, 3.7], + [2.4, 4.4, 4.6, 1.5], + [3.0, 4.1, 3.1, 4.7], + [1.1, 5.0, 5.6, 5.8], + [2.8, 1.3, 4.0, 5.4], + [2.4, 2.7, 2.3, 4.1], + [5.8, 2.6, 4.0, 5.9], + [5.0, 4.1, 4.0, 4.6], + [3.6, 2.0, 2.9, 1.7], + [4.3, 5.1, 2.1, 1.4], + [5.7, 4.2, 4.7, 2.9], + [2.9, 5.6, 5.8, 2.0], + [3.6, 4.7, 3.1, 4.9], + [1.6, 2.8, 4.4, 1.8], + [1.7, 1.7, 2.0, 3.9] +] diff --git a/MLOps/serving_patterns/batch_pattern/models/iris_svc.onnx b/MLOps/serving_patterns/batch_pattern/models/iris_svc.onnx new file mode 100644 index 0000000..13dcabf Binary files /dev/null and b/MLOps/serving_patterns/batch_pattern/models/iris_svc.onnx differ diff --git a/MLOps/serving_patterns/batch_pattern/models/label.json b/MLOps/serving_patterns/batch_pattern/models/label.json new file mode 100644 index 0000000..da38d65 --- /dev/null +++ b/MLOps/serving_patterns/batch_pattern/models/label.json @@ -0,0 +1,5 @@ +{ + "0": "setosa", + "1": "versicolor", + "2": "virginica" +} diff --git a/MLOps/serving_patterns/batch_pattern/requirements.txt b/MLOps/serving_patterns/batch_pattern/requirements.txt new file mode 100644 index 0000000..6c7ecc8 --- /dev/null +++ b/MLOps/serving_patterns/batch_pattern/requirements.txt @@ -0,0 +1,25 @@ +fastapi>=0.65.2 +future>=0.18.2 +gunicorn>=20.0.4 +importlib-metadata>=1.7.0 +joblib>=0.15.1 +numpy>=1.18.5 +onnx>=1.7.0 +onnxruntime>=1.4.0 +Pillow>=8.3.2 +psutil>=5.7.0 +pydantic>=1.8.2 +PyYAML>=5.3.1 
+redis>=3.5.3 +scikit-learn>=0.23.1 +skl2onnx>=1.7.0 +starlette>=0.13.4 +typing>=3.7.4.1 +uvicorn>=0.11.7 +uvloop>=0.14.0 +httptools>=0.1.1 +python-json-logger>=2.0.1 +loguru>=0.5.3 +sqlalchemy>=1.3.18 +alembic>=1.4.2 +mysqlclient>=2.0.1 diff --git a/MLOps/serving_patterns/batch_pattern/run.sh b/MLOps/serving_patterns/batch_pattern/run.sh new file mode 100644 index 0000000..6120f22 --- /dev/null +++ b/MLOps/serving_patterns/batch_pattern/run.sh @@ -0,0 +1,27 @@ +#!/bin/bash + +set -eu + +HOST=${HOST:-"0.0.0.0"} +PORT=${PORT:-8000} +WORKERS=${WORKERS:-4} +UVICORN_WORKER=${UVICORN_WORKER:-"uvicorn.workers.UvicornWorker"} +LOGLEVEL=${LOGLEVEL:-"debug"} +LOGCONFIG=${LOGCONFIG:-"./src/utils/logging.conf"} +BACKLOG=${BACKLOG:-2048} +LIMIT_MAX_REQUESTS=${LIMIT_MAX_REQUESTS:-65536} +MAX_REQUESTS_JITTER=${MAX_REQUESTS_JITTER:-2048} +GRACEFUL_TIMEOUT=${GRACEFUL_TIMEOUT:-10} +APP_NAME=${APP_NAME:-"src.api.app:app"} + +gunicorn ${APP_NAME} \ + -b ${HOST}:${PORT} \ + -w ${WORKERS} \ + -k ${UVICORN_WORKER} \ + --log-level ${LOGLEVEL} \ + --log-config ${LOGCONFIG} \ + --backlog ${BACKLOG} \ + --max-requests ${LIMIT_MAX_REQUESTS} \ + --max-requests-jitter ${MAX_REQUESTS_JITTER} \ + --graceful-timeout ${GRACEFUL_TIMEOUT} \ + --reload diff --git a/MLOps/serving_patterns/batch_pattern/src/__init__.py b/MLOps/serving_patterns/batch_pattern/src/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/MLOps/serving_patterns/batch_pattern/src/api/__init__.py b/MLOps/serving_patterns/batch_pattern/src/api/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/MLOps/serving_patterns/batch_pattern/src/api/app.py b/MLOps/serving_patterns/batch_pattern/src/api/app.py new file mode 100644 index 0000000..8d8b1f7 --- /dev/null +++ b/MLOps/serving_patterns/batch_pattern/src/api/app.py @@ -0,0 +1,19 @@ +from logging import getLogger + +from fastapi import FastAPI +from src.api import routers +from src.configurations import APIConfigurations +from src.db import initialize 
+from src.db.database import engine + +logger = getLogger(__name__) + +initialize.initialize_database(engine=engine, checkfirst=True) + +app = FastAPI( + title=APIConfigurations.title, + description=APIConfigurations.description, + version=APIConfigurations.version, +) + +app.include_router(routers.router, prefix="", tags=[""]) diff --git a/MLOps/serving_patterns/batch_pattern/src/api/routers.py b/MLOps/serving_patterns/batch_pattern/src/api/routers.py new file mode 100644 index 0000000..8c1fd17 --- /dev/null +++ b/MLOps/serving_patterns/batch_pattern/src/api/routers.py @@ -0,0 +1,29 @@ +from logging import getLogger + +from fastapi import APIRouter, Depends +from sqlalchemy.orm import Session +from src.db import cruds +from src.db.database import get_db + +logger = getLogger(__name__) +router = APIRouter() + + +@router.get("/health") +def health(): + return {"health": "ok"} + + +@router.get("/data/all") +def data_all(db: Session = Depends(get_db)): + return cruds.select_all_items(db=db) + + +@router.get("/data/predicted") +def data_predicted(db: Session = Depends(get_db)): + return cruds.select_with_prediction(db=db) + + +@router.get("/data/unpredicted") +def data_unpredicted(db: Session = Depends(get_db)): + return cruds.select_without_prediction(db=db) diff --git a/MLOps/serving_patterns/batch_pattern/src/configurations.py b/MLOps/serving_patterns/batch_pattern/src/configurations.py new file mode 100644 index 0000000..9779fdb --- /dev/null +++ b/MLOps/serving_patterns/batch_pattern/src/configurations.py @@ -0,0 +1,41 @@ +import json +import os +from logging import getLogger + +from src.constants import CONSTANTS, PLATFORM_ENUM + +logger = getLogger(__name__) + + +class PlatformConfigurations: + platform = os.getenv("PLATFORM", PLATFORM_ENUM.DOCKER.value) + if not PLATFORM_ENUM.has_value(platform): + raise ValueError(f"PLATFORM must be one of {[v.value for v in PLATFORM_ENUM.__members__.values()]}") + + mysql_username = os.getenv("MYSQL_USER") + mysql_password = 
os.getenv("MYSQL_PASSWORD") + mysql_port = int(os.getenv("MYSQL_PORT", 3306)) + mysql_database = os.getenv("MYSQL_DATABASE", "sample_db") + mysql_server = os.getenv("MYSQL_SERVER") + sql_alchemy_database_url = ( + f"mysql://{mysql_username}:{mysql_password}@{mysql_server}:{mysql_port}/{mysql_database}?charset=utf8" + ) + + sample_data_path = os.getenv("SAMPLE_DATA_PATH", "models/data.json") + with open(sample_data_path, "r") as f: + sample_data = json.load(f) + + +class APIConfigurations: + title = os.getenv("API_TITLE", "ServingPattern") + description = os.getenv("API_DESCRIPTION", "machine learning system serving patterns") + version = os.getenv("API_VERSION", "0.1") + + +class ModelConfigurations: + model_filepath = os.getenv("MODEL_FILEPATH") + label_filepath = os.getenv("LABEL_FILEPATH") + + +logger.info(f"{APIConfigurations.__name__}: {APIConfigurations.__dict__}") +logger.info(f"{ModelConfigurations.__name__}: {ModelConfigurations.__dict__}") diff --git a/MLOps/serving_patterns/batch_pattern/src/constants.py b/MLOps/serving_patterns/batch_pattern/src/constants.py new file mode 100644 index 0000000..23a0075 --- /dev/null +++ b/MLOps/serving_patterns/batch_pattern/src/constants.py @@ -0,0 +1,35 @@ +import enum + + +class PLATFORM_ENUM(enum.Enum): + DOCKER = "docker" + DOCKER_COMPOSE = "docker_compose" + KUBERNETES = "kubernetes" + TEST = "test" + + @staticmethod + def has_value(item): + return item in [v.value for v in PLATFORM_ENUM.__members__.values()] + + +def constant(f): + def fset(self, value): + raise TypeError + + def fget(self): + return f() + + return property(fget, fset) + + +class _Constants(object): + @constant + def REDIS_INCREMENTS(): + return "increments" + + @constant + def REDIS_QUEUE(): + return "redis_queue" + + +CONSTANTS = _Constants() diff --git a/MLOps/serving_patterns/batch_pattern/src/db/__init__.py b/MLOps/serving_patterns/batch_pattern/src/db/__init__.py new file mode 100644 index 0000000..e69de29 diff --git 
a/MLOps/serving_patterns/batch_pattern/src/db/cruds.py b/MLOps/serving_patterns/batch_pattern/src/db/cruds.py new file mode 100644 index 0000000..088f137 --- /dev/null +++ b/MLOps/serving_patterns/batch_pattern/src/db/cruds.py @@ -0,0 +1,43 @@ +import datetime +from typing import Dict, List + +from sqlalchemy.orm import Session +from src.db import models, schemas + + +def select_all_items(db: Session) -> List[schemas.Item]: + return db.query(models.Item).all() + + +def select_without_prediction(db: Session) -> List[schemas.Item]: + return db.query(models.Item).filter(models.Item.prediction == None).all() + + +def select_with_prediction(db: Session) -> List[schemas.Item]: + return db.query(models.Item).filter(models.Item.prediction != None).all() + + +def select_by_id(db: Session, id: int) -> schemas.Item: + return db.query(models.Item).filter(models.Item.id == id).first() + + +def register_item(db: Session, item: schemas.ItemBase, commit: bool = True): + _item = models.Item(values=item.values) + db.add(_item) + if commit: + db.commit() + db.refresh(_item) + + +def register_items(db: Session, items: List[schemas.ItemBase], commit: bool = True): + for item in items: + register_item(db=db, item=item, commit=commit) + + +def register_predictions(db: Session, predictions: Dict[int, Dict[str, float]], commit: bool = True): + for id, prediction in predictions.items(): + item = select_by_id(db=db, id=id) + item.prediction = prediction + if commit: + db.commit() + db.refresh(item) diff --git a/MLOps/serving_patterns/batch_pattern/src/db/database.py b/MLOps/serving_patterns/batch_pattern/src/db/database.py new file mode 100644 index 0000000..c1add9a --- /dev/null +++ b/MLOps/serving_patterns/batch_pattern/src/db/database.py @@ -0,0 +1,40 @@ +import os +from contextlib import contextmanager + +from sqlalchemy import create_engine +from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy.orm import sessionmaker +from src.configurations import 
PlatformConfigurations + +engine = create_engine( + PlatformConfigurations.sql_alchemy_database_url, + encoding="utf-8", + pool_recycle=3600, + echo=False, +) +SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine) + +Base = declarative_base() + + +def get_db(): + db = SessionLocal() + try: + yield db + except: + db.rollback() + raise + finally: + db.close() + + +@contextmanager +def get_context_db(): + db = SessionLocal() + try: + yield db + except: + db.rollback() + raise + finally: + db.close() diff --git a/MLOps/serving_patterns/batch_pattern/src/db/initialize.py b/MLOps/serving_patterns/batch_pattern/src/db/initialize.py new file mode 100644 index 0000000..2dbecc6 --- /dev/null +++ b/MLOps/serving_patterns/batch_pattern/src/db/initialize.py @@ -0,0 +1,15 @@ +from logging import getLogger + +from src.configurations import PlatformConfigurations +from src.db import cruds, models, schemas +from src.db.database import get_context_db + +logger = getLogger(__name__) + + +def initialize_database(engine, checkfirst: bool = True): + models.create_tables(engine=engine, checkfirst=checkfirst) + with get_context_db() as db: + sample_data = PlatformConfigurations.sample_data + items = [schemas.ItemBase(values=values) for values in sample_data] + cruds.register_items(db=db, items=items, commit=True) diff --git a/MLOps/serving_patterns/batch_pattern/src/db/models.py b/MLOps/serving_patterns/batch_pattern/src/db/models.py new file mode 100644 index 0000000..14e10bd --- /dev/null +++ b/MLOps/serving_patterns/batch_pattern/src/db/models.py @@ -0,0 +1,44 @@ +from logging import getLogger + +from sqlalchemy import Boolean, Column, Integer +from sqlalchemy.dialects.mysql import TIMESTAMP +from sqlalchemy.sql.expression import text +from sqlalchemy.sql.functions import current_timestamp +from sqlalchemy.types import JSON + +logger = getLogger(__name__) + +from src.db.database import Base + + +class Item(Base): + __tablename__ = "items" + + id = Column( + 
Integer, + primary_key=True, + autoincrement=True, + ) + values = Column( + JSON, + nullable=False, + ) + prediction = Column( + JSON, + nullable=True, + ) + created_datetime = Column( + TIMESTAMP, + server_default=current_timestamp(), + nullable=False, + ) + updated_datetime = Column( + TIMESTAMP, + server_default=text("CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP"), + nullable=False, + ) + + +def create_tables(engine, checkfirst: bool = True): + logger.info("Initialize table") + Base.metadata.create_all(engine, checkfirst=checkfirst) diff --git a/MLOps/serving_patterns/batch_pattern/src/db/schemas.py b/MLOps/serving_patterns/batch_pattern/src/db/schemas.py new file mode 100644 index 0000000..bf16d92 --- /dev/null +++ b/MLOps/serving_patterns/batch_pattern/src/db/schemas.py @@ -0,0 +1,22 @@ +import datetime +from typing import Any, Dict, List, Optional + +from pydantic import BaseModel + + +class ItemBase(BaseModel): + values: List[float] + + +class ItemCreate(ItemBase): + pass + + +class Item(ItemBase): + id: int + prediction: Optional[Dict[str, float]] + created_datetime: datetime.datetime + updated_datetime: datetime.datetime + + class Config: + orm_mode = True diff --git a/MLOps/serving_patterns/batch_pattern/src/ml/__init__.py b/MLOps/serving_patterns/batch_pattern/src/ml/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/MLOps/serving_patterns/batch_pattern/src/ml/prediction.py b/MLOps/serving_patterns/batch_pattern/src/ml/prediction.py new file mode 100644 index 0000000..96354f3 --- /dev/null +++ b/MLOps/serving_patterns/batch_pattern/src/ml/prediction.py @@ -0,0 +1,62 @@ +import json +from logging import getLogger +from typing import Dict, List, Sequence + +import numpy as np +import onnxruntime as rt +from pydantic import BaseModel +from src.configurations import ModelConfigurations + +logger = getLogger(__name__) + + +class Data(BaseModel): + data: List[List[float]] = [[5.1, 3.5, 1.4, 0.2]] + + +class Classifier(object): + def 
__init__( + self, + model_filepath: str, + label_filepath: str, + ): + self.model_filepath: str = model_filepath + self.label_filepath: str = label_filepath + self.classifier = None + self.label: Dict[str, str] = {} + self.input_name: str = "" + self.output_name: str = "" + + self.load_model() + self.load_label() + + def load_model(self): + logger.info(f"load model in {self.model_filepath}") + self.classifier = rt.InferenceSession(self.model_filepath) + self.input_name = self.classifier.get_inputs()[0].name + self.output_name = self.classifier.get_outputs()[0].name + logger.info(f"initialized model") + + def load_label(self): + logger.info(f"load label in {self.label_filepath}") + with open(self.label_filepath, "r") as f: + self.label = json.load(f) + logger.info(f"label: {self.label}") + + def predict(self, data: List[List[int]]) -> np.ndarray: + np_data = np.array(data).astype(np.float32) + prediction = self.classifier.run(None, {self.input_name: np_data}) + output = np.array(list(prediction[1][0].values())) + logger.info(f"predict proba {output}") + return output + + def predict_label(self, data: List[List[int]]) -> str: + prediction = self.predict(data=data) + argmax = int(np.argmax(np.array(prediction))) + return self.label[str(argmax)] + + +classifier = Classifier( + model_filepath=ModelConfigurations().model_filepath, + label_filepath=ModelConfigurations().label_filepath, +) diff --git a/MLOps/serving_patterns/batch_pattern/src/task/__init__.py b/MLOps/serving_patterns/batch_pattern/src/task/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/MLOps/serving_patterns/batch_pattern/src/task/job.py b/MLOps/serving_patterns/batch_pattern/src/task/job.py new file mode 100644 index 0000000..5f97d80 --- /dev/null +++ b/MLOps/serving_patterns/batch_pattern/src/task/job.py @@ -0,0 +1,45 @@ +import time +from concurrent.futures import ThreadPoolExecutor +from logging import DEBUG, Formatter, StreamHandler, getLogger +from typing import Tuple + +import 
numpy as np +from src.db import cruds, schemas +from src.db.database import get_context_db +from src.ml.prediction import classifier + +logger = getLogger(__name__) +logger.setLevel(DEBUG) +strhd = StreamHandler() +strhd.setFormatter(Formatter("%(asctime)s %(levelname)8s %(message)s")) +logger.addHandler(strhd) + + +def predict(item: schemas.Item) -> Tuple[int, np.ndarray]: + prediction = classifier.predict(data=[item.values]) + logger.debug(f"prediction log: {item.id} {item.values} {prediction}") + return item.id, prediction + + +def main(): + logger.info("waiting for batch to start") + time.sleep(60) + logger.info("starting batch") + with get_context_db() as db: + data = cruds.select_without_prediction(db=db) + logger.info(f"predict data size: {len(data)}") + predictions = {} + with ThreadPoolExecutor(4) as executor: + results = executor.map(predict, data) + for result in results: + predictions[result[0]] = list(result[1]) + cruds.register_predictions( + db=db, + predictions=predictions, + commit=True, + ) + logger.info("finished batch") + + +if __name__ == "__main__": + main() diff --git a/MLOps/serving_patterns/batch_pattern/src/utils/__init__.py b/MLOps/serving_patterns/batch_pattern/src/utils/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/MLOps/serving_patterns/batch_pattern/src/utils/logging.conf b/MLOps/serving_patterns/batch_pattern/src/utils/logging.conf new file mode 100644 index 0000000..490b0c1 --- /dev/null +++ b/MLOps/serving_patterns/batch_pattern/src/utils/logging.conf @@ -0,0 +1,78 @@ +[loggers] +keys=root, gunicorn.error, gunicorn.access, uvicorn.error, uvicorn.access + +[logger_root] +level=DEBUG +handlers=console + +[logger_gunicorn.error] +level=DEBUG +handlers=gunicorn_error_file, console +propagate=1 +qualname=gunicorn.error + +[logger_gunicorn.access] +level=INFO +handlers=gunicorn_access_file, console +propagate=1 +qualname=gunicorn.access + +[logger_uvicorn.error] +level=DEBUG +handlers=uvicorn_error_file, console 
+propagate=1
+qualname=uvicorn.error
+
+[logger_uvicorn.access]
+level=INFO
+handlers=uvicorn_access_file, console
+propagate=1
+qualname=uvicorn.access
+
+
+[handlers]
+keys=console, gunicorn_error_file, gunicorn_access_file, uvicorn_error_file, uvicorn_access_file
+
+[handler_console]
+class=StreamHandler
+formatter=generic
+args=(sys.stdout, )
+
+[handler_gunicorn_error_file]
+class=logging.FileHandler
+formatter=generic
+args=('/var/log/gunicorn_error.log', 'a')
+
+[handler_gunicorn_access_file]
+class=logging.FileHandler
+formatter=gunicorn_access
+args=('/var/log/gunicorn_access.log', 'a')
+
+[handler_uvicorn_error_file]
+class=logging.FileHandler
+formatter=generic
+args=('/var/log/uvicorn_error.log', 'a')
+
+[handler_uvicorn_access_file]
+class=logging.FileHandler
+formatter=uvicorn_access
+args=('/var/log/uvicorn_access.log', 'a')
+
+
+[formatters]
+keys=generic, gunicorn_access, uvicorn_access
+
+[formatter_generic]
+class=logging.Formatter
+format=[%(asctime)s] [%(levelname)s] [%(process)d] [%(name)s] [%(funcName)s] [%(lineno)d] %(message)s
+datefmt=%Y-%m-%d %H:%M:%S
+
+[formatter_gunicorn_access]
+class=logging.Formatter
+format=[%(asctime)s] %(h)s %(l)s %(u)s %(t)s %(r)s %(m)s %(U)s %(q)s %(H)s %(s)s %(b)s %(f)s %(a)s %(D)s %(p)s
+datefmt=%d/%b/%Y:%H:%M:%S (%Z)
+
+[formatter_uvicorn_access]
+class=logging.Formatter
+format=[%(asctime)s] [%(levelname)s] [%(process)d] [%(name)s] [%(funcName)s] [%(lineno)d] %(message)s
+datefmt=%d/%b/%Y:%H:%M:%S (%Z)
\ No newline at end of file
diff --git a/MLOps/serving_patterns/batch_pattern/src/utils/profiler.py b/MLOps/serving_patterns/batch_pattern/src/utils/profiler.py
new file mode 100644
index 0000000..8f7b1b9
--- /dev/null
+++ b/MLOps/serving_patterns/batch_pattern/src/utils/profiler.py
@@ -0,0 +1,23 @@
+import cProfile
+import os
+from logging import getLogger
+
+logger = getLogger(__name__)
+
+
+def do_cprofile(func):
+    def profiled_func(*args, **kwargs):
+        enable_profile = int(os.getenv("PROFILE", 1))
+        if 
enable_profile: + profile = cProfile.Profile() + try: + profile.enable() + result = func(*args, **kwargs) + profile.disable() + return result + finally: + profile.print_stats() + else: + return func(*args, **kwargs) + + return profiled_func diff --git a/MLOps/serving_patterns/data_cache_pattern/.dockerignore b/MLOps/serving_patterns/data_cache_pattern/.dockerignore new file mode 100644 index 0000000..8c9a4ec --- /dev/null +++ b/MLOps/serving_patterns/data_cache_pattern/.dockerignore @@ -0,0 +1,12 @@ +dockerfile +Dockerfile +.dockerignore +log +tmp +*.sqlite3 +*.sqlite3-journal +__pycache__ +.pytest_cache +*.dvc +dvc.yaml +dvc.lock \ No newline at end of file diff --git a/MLOps/serving_patterns/data_cache_pattern/Dockerfile.pred b/MLOps/serving_patterns/data_cache_pattern/Dockerfile.pred new file mode 100644 index 0000000..870aed1 --- /dev/null +++ b/MLOps/serving_patterns/data_cache_pattern/Dockerfile.pred @@ -0,0 +1,39 @@ +FROM python:3.8-slim as builder + +ARG SERVER_DIR=resnet50_onnx_runtime +ENV PROJECT_DIR data_cache_pattern + +WORKDIR /${PROJECT_DIR} +ADD ./${SERVER_DIR}/requirements.txt /${PROJECT_DIR}/ + +COPY ./${SERVER_DIR}/extract_resnet50_onnx.py /${PROJECT_DIR}/extract_resnet50_onnx.py +COPY ./src/ml/transformers.py /${PROJECT_DIR}/src/ml/transformers.py +COPY ./data /${PROJECT_DIR}/data +COPY ./data/image_net_labels.json /${PROJECT_DIR}/data/image_net_labels.json + +RUN apt-get -y update && \ + apt-get -y install apt-utils gcc && \ + apt-get clean && \ + rm -rf /var/lib/apt/lists/* && \ + pip install --no-cache-dir -r requirements.txt && \ + touch __init__.py && \ + touch src/__init__.py && \ + touch src/ml/__init__.py && \ + python -m extract_resnet50_onnx --pred --prep + +FROM mcr.microsoft.com/onnxruntime/server:latest + +ARG SERVER_DIR=resnet50_onnx_runtime +ENV PROJECT_DIR data_cache_pattern +ENV MODEL_BASE_PATH=${PROJECT_DIR}/models + +WORKDIR /${PROJECT_DIR} + +COPY --from=builder /${MODEL_BASE_PATH}/resnet50.onnx 
/${MODEL_BASE_PATH}/resnet50.onnx + +ENV MODEL_PATH /${MODEL_BASE_PATH}/resnet50.onnx + +WORKDIR /onnxruntime/server/ +COPY ./${SERVER_DIR}/onnx_runtime_server_entrypoint.sh ./onnx_runtime_server_entrypoint.sh +RUN chmod +x onnx_runtime_server_entrypoint.sh +ENTRYPOINT ["./onnx_runtime_server_entrypoint.sh"] diff --git a/MLOps/serving_patterns/data_cache_pattern/Dockerfile.proxy b/MLOps/serving_patterns/data_cache_pattern/Dockerfile.proxy new file mode 100644 index 0000000..a4377c7 --- /dev/null +++ b/MLOps/serving_patterns/data_cache_pattern/Dockerfile.proxy @@ -0,0 +1,52 @@ +FROM python:3.8-slim as builder + +ARG SERVER_DIR=resnet50_onnx_runtime +ENV PROJECT_DIR data_cache_pattern + +WORKDIR /${PROJECT_DIR} +ADD ./${SERVER_DIR}/requirements.txt /${PROJECT_DIR}/ + +COPY ./${SERVER_DIR}/extract_resnet50_onnx.py /${PROJECT_DIR}/extract_resnet50_onnx.py +COPY ./src/ml/transformers.py /${PROJECT_DIR}/src/ml/transformers.py +COPY ./data /${PROJECT_DIR}/data + +RUN apt-get -y update && \ + apt-get -y install apt-utils gcc && \ + apt-get clean && \ + rm -rf /var/lib/apt/lists/* && \ + pip install --no-cache-dir -r requirements.txt && \ + touch __init__.py && \ + touch src/__init__.py && \ + touch src/ml/__init__.py && \ + python -m extract_resnet50_onnx --prep + + +FROM python:3.8-slim + +ENV PROJECT_DIR data_cache_pattern +ENV MODEL_BASE_PATH=/${PROJECT_DIR}/models + +WORKDIR /${PROJECT_DIR} +ADD ./requirements.txt /${PROJECT_DIR}/ +RUN apt-get -y update && \ + apt-get -y install apt-utils gcc && \ + apt-get clean && \ + rm -rf /var/lib/apt/lists/* && \ + pip install --no-cache-dir -r requirements.txt + +COPY ./src/ /${PROJECT_DIR}/src/ +COPY --from=builder /${MODEL_BASE_PATH}/preprocess_transformer.pkl ${MODEL_BASE_PATH}/preprocess_transformer.pkl +COPY --from=builder /${MODEL_BASE_PATH}/softmax_transformer.pkl ${MODEL_BASE_PATH}/softmax_transformer.pkl +COPY ./data /${PROJECT_DIR}/data + +ENV PREPROCESS_TRANSFORMER_PATH ${MODEL_BASE_PATH}/preprocess_transformer.pkl 
+ENV SOFTMAX_TRANSFORMER_PATH ${MODEL_BASE_PATH}/softmax_transformer.pkl +ENV SAMPLE_IMAGE_PATH /${PROJECT_DIR}/data/cat.jpg +ENV LABEL_PATH /${PROJECT_DIR}/data/image_net_labels.json + +ENV LOG_LEVEL DEBUG +ENV LOG_FORMAT TEXT + +COPY ./run.sh /${PROJECT_DIR}/run.sh +RUN chmod +x /${PROJECT_DIR}/run.sh +CMD [ "./run.sh" ] diff --git a/MLOps/serving_patterns/data_cache_pattern/README.md b/MLOps/serving_patterns/data_cache_pattern/README.md new file mode 100644 index 0000000..287a43f --- /dev/null +++ b/MLOps/serving_patterns/data_cache_pattern/README.md @@ -0,0 +1,109 @@ +# 데이터 캐시 패턴 + +## 목적 + +데이터를 캐시하고, 추론 속도를 개선합니다. + +## 전제 + +- Python 3.8 이상 +- Docker +- Docker compose + +## 사용법 + +0. 현재 디렉토리 + +```sh +$ pwd +~/ml-system-in-actions/chapter4_serving_patterns/data_cache_pattern +``` + +1. Docker 이미지 빌드 + +```sh +$ make build_all +# 실행 커맨드 +# docker build \ +# -t shibui/ml-system-in-actions:data_cache_pattern_proxy_0.0.1 \ +# -f ./Dockerfile.proxy . +# docker build \ +# -t shibui/ml-system-in-actions:data_cache_pattern_pred_0.0.1 \ +# -f ./Dockerfile.pred . +``` + +2. Docker compose 로 각 서비스 기동 + +```sh +$ make c_up +# 실행 커맨드 +# docker-compose \ +# -f ./docker-compose.yml \ +# up -d +``` + +3. 기동한 API 로 요청 + +```sh +# 헬스 체크 +$ curl localhost:8000/health +# 출력 +# { +# "health": "ok" +# } + +# 메타 데이터 +$ curl localhost:8000/metadata +# 출력 +# { +# "data_type": "str", +# "data_structure": "(1,1)", +# "data_sample": "0000", +# "prediction_type": "float32", +# "prediction_structure": "(1,1000)", +# "prediction_sample": "[0.07093159, 0.01558308, 0.01348537, ...]" +# } + +# 라벨 목록 +$ curl localhost:8000/label +# 출력 +# [ +# "background", +# "tench", +# "goldfish", +# ... 
+# "bolete", +# "ear", +# "toilet tissue" +# ] + + + +# 테스트 데이터로 추론 요청 +$ curl localhost:8000/predict/test/label +# 출력 +# { +# "prediction": "Persian cat" +# } + +# 이미지 요청 +$ curl \ + -X POST \ + -H "Content-Type: application/json" \ + -d '{"data": "0000"}' \ + localhost:8000/predict/label +# 출력 +# { +# "prediction": "Persian cat" +# } +``` + +4. Docker compose 정지 + +```sh +$ make c_down +# 실행 커맨드 +# docker-compose \ +# -f ./docker-compose.yml \ +# down +``` diff --git a/MLOps/serving_patterns/data_cache_pattern/__init__.py b/MLOps/serving_patterns/data_cache_pattern/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/MLOps/serving_patterns/data_cache_pattern/data/0000.jpg b/MLOps/serving_patterns/data_cache_pattern/data/0000.jpg new file mode 100644 index 0000000..319a41d Binary files /dev/null and b/MLOps/serving_patterns/data_cache_pattern/data/0000.jpg differ diff --git a/MLOps/serving_patterns/data_cache_pattern/data/0001.jpg b/MLOps/serving_patterns/data_cache_pattern/data/0001.jpg new file mode 100644 index 0000000..48b1aa5 Binary files /dev/null and b/MLOps/serving_patterns/data_cache_pattern/data/0001.jpg differ diff --git a/MLOps/serving_patterns/data_cache_pattern/data/0002.jpg b/MLOps/serving_patterns/data_cache_pattern/data/0002.jpg new file mode 100644 index 0000000..c4d4861 Binary files /dev/null and b/MLOps/serving_patterns/data_cache_pattern/data/0002.jpg differ diff --git a/MLOps/serving_patterns/data_cache_pattern/data/0003.jpg b/MLOps/serving_patterns/data_cache_pattern/data/0003.jpg new file mode 100644 index 0000000..eb2d372 Binary files /dev/null and b/MLOps/serving_patterns/data_cache_pattern/data/0003.jpg differ diff --git a/MLOps/serving_patterns/data_cache_pattern/data/0004.jpg b/MLOps/serving_patterns/data_cache_pattern/data/0004.jpg new file mode 100644 index 0000000..3c08846 Binary files /dev/null and b/MLOps/serving_patterns/data_cache_pattern/data/0004.jpg differ diff --git 
a/MLOps/serving_patterns/data_cache_pattern/data/0005.jpg b/MLOps/serving_patterns/data_cache_pattern/data/0005.jpg new file mode 100644 index 0000000..b2f052f Binary files /dev/null and b/MLOps/serving_patterns/data_cache_pattern/data/0005.jpg differ diff --git a/MLOps/serving_patterns/data_cache_pattern/data/0006.jpg b/MLOps/serving_patterns/data_cache_pattern/data/0006.jpg new file mode 100644 index 0000000..98426e2 Binary files /dev/null and b/MLOps/serving_patterns/data_cache_pattern/data/0006.jpg differ diff --git a/MLOps/serving_patterns/data_cache_pattern/data/0007.jpg b/MLOps/serving_patterns/data_cache_pattern/data/0007.jpg new file mode 100644 index 0000000..666b42c Binary files /dev/null and b/MLOps/serving_patterns/data_cache_pattern/data/0007.jpg differ diff --git a/MLOps/serving_patterns/data_cache_pattern/data/image_net_labels.json b/MLOps/serving_patterns/data_cache_pattern/data/image_net_labels.json new file mode 100644 index 0000000..3b183f3 --- /dev/null +++ b/MLOps/serving_patterns/data_cache_pattern/data/image_net_labels.json @@ -0,0 +1,1002 @@ +[ + "tench", + "goldfish", + "great white shark", + "tiger shark", + "hammerhead", + "electric ray", + "stingray", + "cock", + "hen", + "ostrich", + "brambling", + "goldfinch", + "house finch", + "junco", + "indigo bunting", + "robin", + "bulbul", + "jay", + "magpie", + "chickadee", + "water ouzel", + "kite", + "bald eagle", + "vulture", + "great grey owl", + "European fire salamander", + "common newt", + "eft", + "spotted salamander", + "axolotl", + "bullfrog", + "tree frog", + "tailed frog", + "loggerhead", + "leatherback turtle", + "mud turtle", + "terrapin", + "box turtle", + "banded gecko", + "common iguana", + "American chameleon", + "whiptail", + "agama", + "frilled lizard", + "alligator lizard", + "Gila monster", + "green lizard", + "African chameleon", + "Komodo dragon", + "African crocodile", + "American alligator", + "triceratops", + "thunder snake", + "ringneck snake", + "hognose snake", + 
"green snake", + "king snake", + "garter snake", + "water snake", + "vine snake", + "night snake", + "boa constrictor", + "rock python", + "Indian cobra", + "green mamba", + "sea snake", + "horned viper", + "diamondback", + "sidewinder", + "trilobite", + "harvestman", + "scorpion", + "black and gold garden spider", + "barn spider", + "garden spider", + "black widow", + "tarantula", + "wolf spider", + "tick", + "centipede", + "black grouse", + "ptarmigan", + "ruffed grouse", + "prairie chicken", + "peacock", + "quail", + "partridge", + "African grey", + "macaw", + "sulphur-crested cockatoo", + "lorikeet", + "coucal", + "bee eater", + "hornbill", + "hummingbird", + "jacamar", + "toucan", + "drake", + "red-breasted merganser", + "goose", + "black swan", + "tusker", + "echidna", + "platypus", + "wallaby", + "koala", + "wombat", + "jellyfish", + "sea anemone", + "brain coral", + "flatworm", + "nematode", + "conch", + "snail", + "slug", + "sea slug", + "chiton", + "chambered nautilus", + "Dungeness crab", + "rock crab", + "fiddler crab", + "king crab", + "American lobster", + "spiny lobster", + "crayfish", + "hermit crab", + "isopod", + "white stork", + "black stork", + "spoonbill", + "flamingo", + "little blue heron", + "American egret", + "bittern", + "crane", + "limpkin", + "European gallinule", + "American coot", + "bustard", + "ruddy turnstone", + "red-backed sandpiper", + "redshank", + "dowitcher", + "oystercatcher", + "pelican", + "king penguin", + "albatross", + "grey whale", + "killer whale", + "dugong", + "sea lion", + "Chihuahua", + "Japanese spaniel", + "Maltese dog", + "Pekinese", + "Shih-Tzu", + "Blenheim spaniel", + "papillon", + "toy terrier", + "Rhodesian ridgeback", + "Afghan hound", + "basset", + "beagle", + "bloodhound", + "bluetick", + "black-and-tan coonhound", + "Walker hound", + "English foxhound", + "redbone", + "borzoi", + "Irish wolfhound", + "Italian greyhound", + "whippet", + "Ibizan hound", + "Norwegian elkhound", + "otterhound", + "Saluki", 
+ "Scottish deerhound", + "Weimaraner", + "Staffordshire bullterrier", + "American Staffordshire terrier", + "Bedlington terrier", + "Border terrier", + "Kerry blue terrier", + "Irish terrier", + "Norfolk terrier", + "Norwich terrier", + "Yorkshire terrier", + "wire-haired fox terrier", + "Lakeland terrier", + "Sealyham terrier", + "Airedale", + "cairn", + "Australian terrier", + "Dandie Dinmont", + "Boston bull", + "miniature schnauzer", + "giant schnauzer", + "standard schnauzer", + "Scotch terrier", + "Tibetan terrier", + "silky terrier", + "soft-coated wheaten terrier", + "West Highland white terrier", + "Lhasa", + "flat-coated retriever", + "curly-coated retriever", + "golden retriever", + "Labrador retriever", + "Chesapeake Bay retriever", + "German short-haired pointer", + "vizsla", + "English setter", + "Irish setter", + "Gordon setter", + "Brittany spaniel", + "clumber", + "English springer", + "Welsh springer spaniel", + "cocker spaniel", + "Sussex spaniel", + "Irish water spaniel", + "kuvasz", + "schipperke", + "groenendael", + "malinois", + "briard", + "kelpie", + "komondor", + "Old English sheepdog", + "Shetland sheepdog", + "collie", + "Border collie", + "Bouvier des Flandres", + "Rottweiler", + "German shepherd", + "Doberman", + "miniature pinscher", + "Greater Swiss Mountain dog", + "Bernese mountain dog", + "Appenzeller", + "EntleBucher", + "boxer", + "bull mastiff", + "Tibetan mastiff", + "French bulldog", + "Great Dane", + "Saint Bernard", + "Eskimo dog", + "malamute", + "Siberian husky", + "dalmatian", + "affenpinscher", + "basenji", + "pug", + "Leonberg", + "Newfoundland", + "Great Pyrenees", + "Samoyed", + "Pomeranian", + "chow", + "keeshond", + "Brabancon griffon", + "Pembroke", + "Cardigan", + "toy poodle", + "miniature poodle", + "standard poodle", + "Mexican hairless", + "timber wolf", + "white wolf", + "red wolf", + "coyote", + "dingo", + "dhole", + "African hunting dog", + "hyena", + "red fox", + "kit fox", + "Arctic fox", + "grey fox", 
+ "tabby", + "tiger cat", + "Persian cat", + "Siamese cat", + "Egyptian cat", + "cougar", + "lynx", + "leopard", + "snow leopard", + "jaguar", + "lion", + "tiger", + "cheetah", + "brown bear", + "American black bear", + "ice bear", + "sloth bear", + "mongoose", + "meerkat", + "tiger beetle", + "ladybug", + "ground beetle", + "long-horned beetle", + "leaf beetle", + "dung beetle", + "rhinoceros beetle", + "weevil", + "fly", + "bee", + "ant", + "grasshopper", + "cricket", + "walking stick", + "cockroach", + "mantis", + "cicada", + "leafhopper", + "lacewing", + "dragonfly", + "damselfly", + "admiral", + "ringlet", + "monarch", + "cabbage butterfly", + "sulphur butterfly", + "lycaenid", + "starfish", + "sea urchin", + "sea cucumber", + "wood rabbit", + "hare", + "Angora", + "hamster", + "porcupine", + "fox squirrel", + "marmot", + "beaver", + "guinea pig", + "sorrel", + "zebra", + "hog", + "wild boar", + "warthog", + "hippopotamus", + "ox", + "water buffalo", + "bison", + "ram", + "bighorn", + "ibex", + "hartebeest", + "impala", + "gazelle", + "Arabian camel", + "llama", + "weasel", + "mink", + "polecat", + "black-footed ferret", + "otter", + "skunk", + "badger", + "armadillo", + "three-toed sloth", + "orangutan", + "gorilla", + "chimpanzee", + "gibbon", + "siamang", + "guenon", + "patas", + "baboon", + "macaque", + "langur", + "colobus", + "proboscis monkey", + "marmoset", + "capuchin", + "howler monkey", + "titi", + "spider monkey", + "squirrel monkey", + "Madagascar cat", + "indri", + "Indian elephant", + "African elephant", + "lesser panda", + "giant panda", + "barracouta", + "eel", + "coho", + "rock beauty", + "anemone fish", + "sturgeon", + "gar", + "lionfish", + "puffer", + "abacus", + "abaya", + "academic gown", + "accordion", + "acoustic guitar", + "aircraft carrier", + "airliner", + "airship", + "altar", + "ambulance", + "amphibian", + "analog clock", + "apiary", + "apron", + "ashcan", + "assault rifle", + "backpack", + "bakery", + "balance beam", + 
"balloon", + "ballpoint", + "Band Aid", + "banjo", + "bannister", + "barbell", + "barber chair", + "barbershop", + "barn", + "barometer", + "barrel", + "barrow", + "baseball", + "basketball", + "bassinet", + "bassoon", + "bathing cap", + "bath towel", + "bathtub", + "beach wagon", + "beacon", + "beaker", + "bearskin", + "beer bottle", + "beer glass", + "bell cote", + "bib", + "bicycle-built-for-two", + "bikini", + "binder", + "binoculars", + "birdhouse", + "boathouse", + "bobsled", + "bolo tie", + "bonnet", + "bookcase", + "bookshop", + "bottlecap", + "bow", + "bow tie", + "brass", + "brassiere", + "breakwater", + "breastplate", + "broom", + "bucket", + "buckle", + "bulletproof vest", + "bullet train", + "butcher shop", + "cab", + "caldron", + "candle", + "cannon", + "canoe", + "can opener", + "cardigan", + "car mirror", + "carousel", + "carpenter's kit", + "carton", + "car wheel", + "cash machine", + "cassette", + "cassette player", + "castle", + "catamaran", + "CD player", + "cello", + "cellular telephone", + "chain", + "chainlink fence", + "chain mail", + "chain saw", + "chest", + "chiffonier", + "chime", + "china cabinet", + "Christmas stocking", + "church", + "cinema", + "cleaver", + "cliff dwelling", + "cloak", + "clog", + "cocktail shaker", + "coffee mug", + "coffeepot", + "coil", + "combination lock", + "computer keyboard", + "confectionery", + "container ship", + "convertible", + "corkscrew", + "cornet", + "cowboy boot", + "cowboy hat", + "cradle", + "crane", + "crash helmet", + "crate", + "crib", + "Crock Pot", + "croquet ball", + "crutch", + "cuirass", + "dam", + "desk", + "desktop computer", + "dial telephone", + "diaper", + "digital clock", + "digital watch", + "dining table", + "dishrag", + "dishwasher", + "disk brake", + "dock", + "dogsled", + "dome", + "doormat", + "drilling platform", + "drum", + "drumstick", + "dumbbell", + "Dutch oven", + "electric fan", + "electric guitar", + "electric locomotive", + "entertainment center", + "envelope", + 
"espresso maker", + "face powder", + "feather boa", + "file", + "fireboat", + "fire engine", + "fire screen", + "flagpole", + "flute", + "folding chair", + "football helmet", + "forklift", + "fountain", + "fountain pen", + "four-poster", + "freight car", + "French horn", + "frying pan", + "fur coat", + "garbage truck", + "gasmask", + "gas pump", + "goblet", + "go-kart", + "golf ball", + "golfcart", + "gondola", + "gong", + "gown", + "grand piano", + "greenhouse", + "grille", + "grocery store", + "guillotine", + "hair slide", + "hair spray", + "half track", + "hammer", + "hamper", + "hand blower", + "hand-held computer", + "handkerchief", + "hard disc", + "harmonica", + "harp", + "harvester", + "hatchet", + "holster", + "home theater", + "honeycomb", + "hook", + "hoopskirt", + "horizontal bar", + "horse cart", + "hourglass", + "iPod", + "iron", + "jack-o'-lantern", + "jean", + "jeep", + "jersey", + "jigsaw puzzle", + "jinrikisha", + "joystick", + "kimono", + "knee pad", + "knot", + "lab coat", + "ladle", + "lampshade", + "laptop", + "lawn mower", + "lens cap", + "letter opener", + "library", + "lifeboat", + "lighter", + "limousine", + "liner", + "lipstick", + "Loafer", + "lotion", + "loudspeaker", + "loupe", + "lumbermill", + "magnetic compass", + "mailbag", + "mailbox", + "maillot", + "maillot", + "manhole cover", + "maraca", + "marimba", + "mask", + "matchstick", + "maypole", + "maze", + "measuring cup", + "medicine chest", + "megalith", + "microphone", + "microwave", + "military uniform", + "milk can", + "minibus", + "miniskirt", + "minivan", + "missile", + "mitten", + "mixing bowl", + "mobile home", + "Model T", + "modem", + "monastery", + "monitor", + "moped", + "mortar", + "mortarboard", + "mosque", + "mosquito net", + "motor scooter", + "mountain bike", + "mountain tent", + "mouse", + "mousetrap", + "moving van", + "muzzle", + "nail", + "neck brace", + "necklace", + "nipple", + "notebook", + "obelisk", + "oboe", + "ocarina", + "odometer", + "oil filter", + 
"organ", + "oscilloscope", + "overskirt", + "oxcart", + "oxygen mask", + "packet", + "paddle", + "paddlewheel", + "padlock", + "paintbrush", + "pajama", + "palace", + "panpipe", + "paper towel", + "parachute", + "parallel bars", + "park bench", + "parking meter", + "passenger car", + "patio", + "pay-phone", + "pedestal", + "pencil box", + "pencil sharpener", + "perfume", + "Petri dish", + "photocopier", + "pick", + "pickelhaube", + "picket fence", + "pickup", + "pier", + "piggy bank", + "pill bottle", + "pillow", + "ping-pong ball", + "pinwheel", + "pirate", + "pitcher", + "plane", + "planetarium", + "plastic bag", + "plate rack", + "plow", + "plunger", + "Polaroid camera", + "pole", + "police van", + "poncho", + "pool table", + "pop bottle", + "pot", + "potter's wheel", + "power drill", + "prayer rug", + "printer", + "prison", + "projectile", + "projector", + "puck", + "punching bag", + "purse", + "quill", + "quilt", + "racer", + "racket", + "radiator", + "radio", + "radio telescope", + "rain barrel", + "recreational vehicle", + "reel", + "reflex camera", + "refrigerator", + "remote control", + "restaurant", + "revolver", + "rifle", + "rocking chair", + "rotisserie", + "rubber eraser", + "rugby ball", + "rule", + "running shoe", + "safe", + "safety pin", + "saltshaker", + "sandal", + "sarong", + "sax", + "scabbard", + "scale", + "school bus", + "schooner", + "scoreboard", + "screen", + "screw", + "screwdriver", + "seat belt", + "sewing machine", + "shield", + "shoe shop", + "shoji", + "shopping basket", + "shopping cart", + "shovel", + "shower cap", + "shower curtain", + "ski", + "ski mask", + "sleeping bag", + "slide rule", + "sliding door", + "slot", + "snorkel", + "snowmobile", + "snowplow", + "soap dispenser", + "soccer ball", + "sock", + "solar dish", + "sombrero", + "soup bowl", + "space bar", + "space heater", + "space shuttle", + "spatula", + "speedboat", + "spider web", + "spindle", + "sports car", + "spotlight", + "stage", + "steam locomotive", + "steel 
arch bridge", + "steel drum", + "stethoscope", + "stole", + "stone wall", + "stopwatch", + "stove", + "strainer", + "streetcar", + "stretcher", + "studio couch", + "stupa", + "submarine", + "suit", + "sundial", + "sunglass", + "sunglasses", + "sunscreen", + "suspension bridge", + "swab", + "sweatshirt", + "swimming trunks", + "swing", + "switch", + "syringe", + "table lamp", + "tank", + "tape player", + "teapot", + "teddy", + "television", + "tennis ball", + "thatch", + "theater curtain", + "thimble", + "thresher", + "throne", + "tile roof", + "toaster", + "tobacco shop", + "toilet seat", + "torch", + "totem pole", + "tow truck", + "toyshop", + "tractor", + "trailer truck", + "tray", + "trench coat", + "tricycle", + "trimaran", + "tripod", + "triumphal arch", + "trolleybus", + "trombone", + "tub", + "turnstile", + "typewriter keyboard", + "umbrella", + "unicycle", + "upright", + "vacuum", + "vase", + "vault", + "velvet", + "vending machine", + "vestment", + "viaduct", + "violin", + "volleyball", + "waffle iron", + "wall clock", + "wallet", + "wardrobe", + "warplane", + "washbasin", + "washer", + "water bottle", + "water jug", + "water tower", + "whiskey jug", + "whistle", + "wig", + "window screen", + "window shade", + "Windsor tie", + "wine bottle", + "wing", + "wok", + "wooden spoon", + "wool", + "worm fence", + "wreck", + "yawl", + "yurt", + "web site", + "comic book", + "crossword puzzle", + "street sign", + "traffic light", + "book jacket", + "menu", + "plate", + "guacamole", + "consomme", + "hot pot", + "trifle", + "ice cream", + "ice lolly", + "French loaf", + "bagel", + "pretzel", + "cheeseburger", + "hotdog", + "mashed potato", + "head cabbage", + "broccoli", + "cauliflower", + "zucchini", + "spaghetti squash", + "acorn squash", + "butternut squash", + "cucumber", + "artichoke", + "bell pepper", + "cardoon", + "mushroom", + "Granny Smith", + "strawberry", + "orange", + "lemon", + "fig", + "pineapple", + "banana", + "jackfruit", + "custard apple", + 
"pomegranate", + "hay", + "carbonara", + "chocolate sauce", + "dough", + "meat loaf", + "pizza", + "potpie", + "burrito", + "red wine", + "espresso", + "cup", + "eggnog", + "alp", + "bubble", + "cliff", + "coral reef", + "geyser", + "lakeside", + "promontory", + "sandbar", + "seashore", + "valley", + "volcano", + "ballplayer", + "groom", + "scuba diver", + "rapeseed", + "daisy", + "yellow lady's slipper", + "corn", + "acorn", + "hip", + "buckeye", + "coral fungus", + "agaric", + "gyromitra", + "stinkhorn", + "earthstar", + "hen-of-the-woods", + "bolete", + "ear", + "toilet tissue" +] diff --git a/MLOps/serving_patterns/data_cache_pattern/docker-compose.yml b/MLOps/serving_patterns/data_cache_pattern/docker-compose.yml new file mode 100644 index 0000000..0d5efb5 --- /dev/null +++ b/MLOps/serving_patterns/data_cache_pattern/docker-compose.yml @@ -0,0 +1,34 @@ +version: "3" + +services: + proxy: + container_name: proxy + image: shibui/ml-system-in-actions:data_cache_pattern_proxy_0.0.1 + restart: always + environment: + - PLATFORM=docker_compose + - API_ADDRESS=pred + ports: + - "8000:8000" + command: ./run.sh + depends_on: + - pred + - redis + + pred: + container_name: pred + image: shibui/ml-system-in-actions:data_cache_pattern_pred_0.0.1 + restart: always + environment: + - HTTP_PORT=8001 + - GRPC_PORT=50051 + ports: + - "8001:8001" + - "50051:50051" + entrypoint: ["./onnx_runtime_server_entrypoint.sh"] + + redis: + container_name: redis + image: "redis:latest" + ports: + - "6379:6379" diff --git a/MLOps/serving_patterns/data_cache_pattern/makefile b/MLOps/serving_patterns/data_cache_pattern/makefile new file mode 100644 index 0000000..f2e841e --- /dev/null +++ b/MLOps/serving_patterns/data_cache_pattern/makefile @@ -0,0 +1,69 @@ +DOCKER_REPOSITORY := shibui/ml-system-in-actions + +ABSOLUTE_PATH := $(shell pwd) + +DOCKERFILE := Dockerfile +DOCKER_COMPOSE := docker-compose.yml +IMAGE_VERSION := 0.0.1 + +DATA_CACHE_PATTERN := data_cache_pattern 
+DATA_CACHE_PATTERN_PREP := proxy +DATA_CACHE_PATTERN_PREP_PORT := 8000 +DATA_CACHE_PATTERN_PRED := pred +DATA_CACHE_PATTERN_REST_PORT := 8001 +DATA_CACHE_PATTERN_GRPC_PORT := 50051 + +.PHONY: proto +proto: + python \ + -m grpc_tools.protoc \ + -I src/proto \ + --python_out=src/proto \ + --grpc_python_out=src/proto \ + src/proto/onnx-ml.proto \ + src/proto/predict.proto \ + src/proto/prediction_service.proto + +.PHONY: build_proxy +build_proxy: + docker build \ + -t $(DOCKER_REPOSITORY):$(DATA_CACHE_PATTERN)_$(DATA_CACHE_PATTERN_PREP)_$(IMAGE_VERSION) \ + -f ./$(DOCKERFILE).proxy . + +.PHONY: push_proxy +push_proxy: + docker push $(DOCKER_REPOSITORY):$(DATA_CACHE_PATTERN)_$(DATA_CACHE_PATTERN_PREP)_$(IMAGE_VERSION) + + +.PHONY: build_pred +build_pred: + docker build \ + -t $(DOCKER_REPOSITORY):$(DATA_CACHE_PATTERN)_$(DATA_CACHE_PATTERN_PRED)_$(IMAGE_VERSION) \ + -f ./$(DOCKERFILE).pred . + +.PHONY: push_pred +push_pred: + docker push $(DOCKER_REPOSITORY):$(DATA_CACHE_PATTERN)_$(DATA_CACHE_PATTERN_PRED)_$(IMAGE_VERSION) + +.PHONY: build_all +build_all: build_proxy build_pred + + +.PHONY: push_all +push_all: push_proxy push_pred + +.PHONY: c_build +c_build: build_all + +.PHONY: c_up +c_up: + docker-compose \ + -f ./$(DOCKER_COMPOSE) \ + up -d + +.PHONY: c_down +c_down: + docker-compose \ + -f ./$(DOCKER_COMPOSE) \ + down + diff --git a/MLOps/serving_patterns/data_cache_pattern/requirements.txt b/MLOps/serving_patterns/data_cache_pattern/requirements.txt new file mode 100644 index 0000000..6de54e8 --- /dev/null +++ b/MLOps/serving_patterns/data_cache_pattern/requirements.txt @@ -0,0 +1,23 @@ +fastapi>=0.65.2 +future>=0.18.2 +gunicorn>=20.0.4 +importlib-metadata>=1.7.0 +joblib>=0.15.1 +numpy>=1.18.5 +onnx>=1.7.0 +onnxruntime>=1.4.0 +Pillow>=8.3.2 +psutil>=5.7.0 +pydantic>=1.8.2 +PyYAML>=5.3.1 +redis>=3.5.3 +scikit-learn>=0.23.1 +starlette>=0.13.4 +typing>=3.7.4.1 +uvicorn>=0.11.7 +uvloop>=0.14.0 +httptools>=0.1.1 +python-json-logger>=2.0.1 +loguru>=0.5.3 
+requests>=2.25.0 +grpcio>=1.32.0 \ No newline at end of file diff --git a/MLOps/serving_patterns/data_cache_pattern/resnet50_onnx_runtime/__init__.py b/MLOps/serving_patterns/data_cache_pattern/resnet50_onnx_runtime/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/MLOps/serving_patterns/data_cache_pattern/resnet50_onnx_runtime/extract_resnet50_onnx.py b/MLOps/serving_patterns/data_cache_pattern/resnet50_onnx_runtime/extract_resnet50_onnx.py new file mode 100644 index 0000000..cc83a73 --- /dev/null +++ b/MLOps/serving_patterns/data_cache_pattern/resnet50_onnx_runtime/extract_resnet50_onnx.py @@ -0,0 +1,85 @@ +import json +import os +from typing import List, Tuple, Union + +import click +import joblib +import numpy as np +import onnxruntime as rt +import requests +import torch +import torch.nn as nn +from PIL import Image +from src.ml.transformers import PytorchImagePreprocessTransformer, SoftmaxTransformer +from torchvision.models.resnet import resnet50 + + +def dump_sklearn(model, name: str): + joblib.dump(model, name) + + +def get_label(json_path: str = "./data/image_net_labels.json") -> List[str]: + with open(json_path, "r") as f: + labels = json.load(f) + return labels + + +@click.command(name="extract resnet50 onnx runtime and preprocessing") +@click.option("--pred", is_flag=True) +@click.option("--prep", is_flag=True) +def main(pred: bool, prep: bool): + model_directory = "./models/" + os.makedirs(model_directory, exist_ok=True) + + onnx_filename = "resnet50.onnx" + onnx_filepath = os.path.join(model_directory, onnx_filename) + + preprocess_filename = f"preprocess_transformer.pkl" + preprocess_filepath = os.path.join(model_directory, preprocess_filename) + + postprocess_filename = f"softmax_transformer.pkl" + postprocess_filepath = os.path.join(model_directory, postprocess_filename) + + if pred: + model = resnet50(pretrained=True) + x_dummy = torch.rand((1, 3, 224, 224), device="cpu") + model.eval() + torch.onnx.export( + model, + x_dummy, 
+ onnx_filepath, + export_params=True, + opset_version=10, + do_constant_folding=True, + input_names=["input"], + output_names=["output"], + verbose=False, + ) + + if prep: + preprocess = PytorchImagePreprocessTransformer() + dump_sklearn(preprocess, preprocess_filepath) + + postprocess = SoftmaxTransformer() + dump_sklearn(postprocess, postprocess_filepath) + + if prep and pred: + image = Image.open("./data/0000.jpg") + np_image = preprocess.transform(image) + print(np_image.shape) + + sess = rt.InferenceSession(onnx_filepath) + inp, out = sess.get_inputs()[0], sess.get_outputs()[0] + print(f"input name='{inp.name}' shape={inp.shape} type={inp.type}") + print(f"output name='{out.name}' shape={out.shape} type={out.type}") + pred_onx = sess.run([out.name], {inp.name: np_image}) + + prediction = postprocess.transform(np.array(pred_onx)) + + labels = get_label(json_path="./data/image_net_labels.json") + print(prediction.shape) + print(labels[np.argmax(prediction[0])]) + + +if __name__ == "__main__": + main() diff --git a/MLOps/serving_patterns/data_cache_pattern/resnet50_onnx_runtime/onnx_runtime_server_entrypoint.sh b/MLOps/serving_patterns/data_cache_pattern/resnet50_onnx_runtime/onnx_runtime_server_entrypoint.sh new file mode 100644 index 0000000..3ea2996 --- /dev/null +++ b/MLOps/serving_patterns/data_cache_pattern/resnet50_onnx_runtime/onnx_runtime_server_entrypoint.sh @@ -0,0 +1,15 @@ +#!/bin/bash + +set -eu + +HTTP_PORT=${HTTP_PORT:-8001} +GRPC_PORT=${GRPC_PORT:-50051} +LOGLEVEL=${LOGLEVEL:-"debug"} +NUM_HTTP_THREADS=${NUM_HTTP_THREADS:-4} +MODEL_PATH=${MODEL_PATH:-"/data_cache_pattern/models/resnet50.onnx"} + +./onnxruntime_server \ + --http_port=${HTTP_PORT} \ + --grpc_port=${GRPC_PORT} \ + --num_http_threads=${NUM_HTTP_THREADS} \ + --model_path=${MODEL_PATH} \ No newline at end of file diff --git a/MLOps/serving_patterns/data_cache_pattern/resnet50_onnx_runtime/requirements.txt 
b/MLOps/serving_patterns/data_cache_pattern/resnet50_onnx_runtime/requirements.txt new file mode 100644 index 0000000..d64d579 --- /dev/null +++ b/MLOps/serving_patterns/data_cache_pattern/resnet50_onnx_runtime/requirements.txt @@ -0,0 +1,13 @@ +joblib>=0.15.1 +numpy>=1.18.5 +onnxruntime>=1.4.0 +Pillow>=8.3.2 +pydantic>=1.8.2 +PyYAML>=5.3.1 +scikit-learn==0.23.1 +skl2onnx>=1.7.0 +typing>=3.7.4.1 +requests>=2.25.1 +torch>=1.7.0 +torchvision>=0.8.1 +click>=7.1.2 \ No newline at end of file diff --git a/MLOps/serving_patterns/data_cache_pattern/run.sh b/MLOps/serving_patterns/data_cache_pattern/run.sh new file mode 100644 index 0000000..9621e17 --- /dev/null +++ b/MLOps/serving_patterns/data_cache_pattern/run.sh @@ -0,0 +1,27 @@ +#!/bin/bash + +set -eu + +HOST=${HOST:-"0.0.0.0"} +PORT=${PORT:-8000} +WORKERS=${WORKERS:-4} +UVICORN_WORKER=${UVICORN_WORKER:-"uvicorn.workers.UvicornWorker"} +LOGLEVEL=${LOGLEVEL:-"debug"} +LOGCONFIG=${LOGCONFIG:-"./src/utils/logging.conf"} +BACKLOG=${BACKLOG:-2048} +LIMIT_MAX_REQUESTS=${LIMIT_MAX_REQUESTS:-65536} +MAX_REQUESTS_JITTER=${MAX_REQUESTS_JITTER:-2048} +GRACEFUL_TIMEOUT=${GRACEFUL_TIMEOUT:-10} +APP_NAME=${APP_NAME:-"src.app.app:app"} + +gunicorn ${APP_NAME} \ + -b ${HOST}:${PORT} \ + -w ${WORKERS} \ + -k ${UVICORN_WORKER} \ + --log-level ${LOGLEVEL} \ + --log-config ${LOGCONFIG} \ + --backlog ${BACKLOG} \ + --max-requests ${LIMIT_MAX_REQUESTS} \ + --max-requests-jitter ${MAX_REQUESTS_JITTER} \ + --graceful-timeout ${GRACEFUL_TIMEOUT} \ + --reload diff --git a/MLOps/serving_patterns/data_cache_pattern/src/__init__.py b/MLOps/serving_patterns/data_cache_pattern/src/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/MLOps/serving_patterns/data_cache_pattern/src/app/__init__.py b/MLOps/serving_patterns/data_cache_pattern/src/app/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/MLOps/serving_patterns/data_cache_pattern/src/app/app.py b/MLOps/serving_patterns/data_cache_pattern/src/app/app.py new 
file mode 100644 index 0000000..e3902d5 --- /dev/null +++ b/MLOps/serving_patterns/data_cache_pattern/src/app/app.py @@ -0,0 +1,16 @@ +import os +from logging import getLogger + +from fastapi import FastAPI +from src.app.routers import routers +from src.configurations import APIConfigurations + +logger = getLogger(__name__) + +app = FastAPI( + title=APIConfigurations.title, + description=APIConfigurations.description, + version=APIConfigurations.version, +) + +app.include_router(routers.router, prefix="", tags=[""]) diff --git a/MLOps/serving_patterns/data_cache_pattern/src/app/backend/__init__.py b/MLOps/serving_patterns/data_cache_pattern/src/app/backend/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/MLOps/serving_patterns/data_cache_pattern/src/app/backend/background_job.py b/MLOps/serving_patterns/data_cache_pattern/src/app/backend/background_job.py new file mode 100644 index 0000000..b435207 --- /dev/null +++ b/MLOps/serving_patterns/data_cache_pattern/src/app/backend/background_job.py @@ -0,0 +1,49 @@ +import json +import logging +from typing import Any, Dict, List, Union + +from fastapi import BackgroundTasks +from pydantic import BaseModel +from src.app.backend.redis_client import redis_client + +logger = logging.getLogger(__name__) + + +def set_data_redis(key: str, value: Any) -> bool: + data_json = json.dumps(value) + redis_client.set(key, data_json) + return True + + +def get_data_redis(key: str) -> Union[List, None]: + data = redis_client.get(key) + if data is None: + return None + original_data = json.loads(data) + return original_data + + +class SaveDataJob(BaseModel): + item_id: str + data: Any + is_completed: bool = False + + def __call__(self): + pass + + +class SaveDataRedisJob(SaveDataJob): + def __call__(self): + save_data_jobs[self.item_id] = self + logger.info(f"registered cache: {self.item_id} in {self.__class__.__name__}") + self.is_completed = set_data_redis(key=self.item_id, value=self.data) + logger.info(f"completed 
save data: {self.item_id}") + + +def save_data_job(data: Any, item_id: str, background_tasks: BackgroundTasks) -> str: + task = SaveDataRedisJob(item_id=item_id, data=data) + background_tasks.add_task(task) + return item_id + + +save_data_jobs: Dict[str, SaveDataJob] = {} diff --git a/MLOps/serving_patterns/data_cache_pattern/src/app/backend/redis_client.py b/MLOps/serving_patterns/data_cache_pattern/src/app/backend/redis_client.py new file mode 100644 index 0000000..1103b51 --- /dev/null +++ b/MLOps/serving_patterns/data_cache_pattern/src/app/backend/redis_client.py @@ -0,0 +1,9 @@ +import redis +from src.configurations import RedisCacheConfigurations + +redis_client = redis.Redis( + host=RedisCacheConfigurations.cache_host, + port=RedisCacheConfigurations.cache_port, + db=RedisCacheConfigurations.redis_db, + decode_responses=RedisCacheConfigurations.redis_decode_responses, +) diff --git a/MLOps/serving_patterns/data_cache_pattern/src/app/routers/__init__.py b/MLOps/serving_patterns/data_cache_pattern/src/app/routers/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/MLOps/serving_patterns/data_cache_pattern/src/app/routers/routers.py b/MLOps/serving_patterns/data_cache_pattern/src/app/routers/routers.py new file mode 100644 index 0000000..461bd66 --- /dev/null +++ b/MLOps/serving_patterns/data_cache_pattern/src/app/routers/routers.py @@ -0,0 +1,54 @@ +from logging import getLogger +from typing import Any, Dict, List + +from fastapi import APIRouter, BackgroundTasks +from src.ml.prediction import Data, classifier + +logger = getLogger(__name__) +router = APIRouter() + + +@router.get("/health") +def health() -> Dict[str, str]: + return {"health": "ok"} + + +@router.get("/metadata") +def metadata() -> Dict[str, Any]: + return { + "data_type": "str", + "data_structure": "(1,1)", + "data_sample": Data().data, + "prediction_type": "float32", + "prediction_structure": "(1,1000)", + "prediction_sample": "[0.07093159, 0.01558308, 0.01348537, ...]", + } + 
+ +@router.get("/label") +def label() -> Dict[int, str]: + return classifier.label + + +@router.get("/predict/test") +def predict_test(background_tasks: BackgroundTasks) -> Dict[str, List[float]]: + prediction = classifier.predict(data=Data(), background_tasks=background_tasks) + return {"prediction": list(prediction)} + + +@router.get("/predict/test/label") +def predict_test_label(background_tasks: BackgroundTasks) -> Dict[str, str]: + prediction = classifier.predict_label(data=Data(), background_tasks=background_tasks) + return {"prediction": prediction} + + +@router.post("/predict") +def predict(data: Data, background_tasks: BackgroundTasks) -> Dict[str, List[float]]: + prediction = classifier.predict(data=data, background_tasks=background_tasks) + return {"prediction": list(prediction)} + + +@router.post("/predict/label") +def predict_label(data: Data, background_tasks: BackgroundTasks) -> Dict[str, str]: + prediction = classifier.predict_label(data=data, background_tasks=background_tasks) + return {"prediction": prediction} diff --git a/MLOps/serving_patterns/data_cache_pattern/src/configurations.py b/MLOps/serving_patterns/data_cache_pattern/src/configurations.py new file mode 100644 index 0000000..670d150 --- /dev/null +++ b/MLOps/serving_patterns/data_cache_pattern/src/configurations.py @@ -0,0 +1,53 @@ +import os +from logging import getLogger + +from src.constants import PLATFORM_ENUM + +logger = getLogger(__name__) + + +class PlatformConfigurations: + platform = os.getenv("PLATFORM", PLATFORM_ENUM.DOCKER.value) + if not PLATFORM_ENUM.has_value(platform): + raise ValueError(f"PLATFORM must be one of {[v.value for v in PLATFORM_ENUM.__members__.values()]}") + + +class CacheConfigurations: + cache_host = os.getenv("CACHE_HOST", "redis") + cache_port = int(os.getenv("CACHE_PORT", 6379)) + queue_name = os.getenv("QUEUE_NAME", "queue") + + +class RedisCacheConfigurations(CacheConfigurations): + redis_db = int(os.getenv("REDIS_DB", 0)) + redis_decode_responses 
= bool(os.getenv("REDIS_DECODE_RESPONSES", True)) + + +class APIConfigurations: + title = os.getenv("API_TITLE", "ServingPattern") + description = os.getenv("API_DESCRIPTION", "machine learning system serving patterns") + version = os.getenv("API_VERSION", "0.1") + + +class ModelConfigurations: + api_address = os.getenv("API_ADDRESS", "localhost") + grpc_port = int(os.getenv("GRPC_PORT", 50051)) + rest_api_port = int(os.getenv("REST_API_PORT", 8001)) + label_path = os.getenv("LABEL_PATH", "/data_cache_pattern/data/image_net_labels.json") + + preprocess_transformer_path = os.getenv( + "PREPROCESS_TRANSFORMER_PATH", "/data_cache_pattern/models/preprocess_transformer.pkl" + ) + softmax_transformer_path = os.getenv( + "SOFTMAX_TRANSFORMER_PATH", "/data_cache_pattern/models/softmax_transformer.pkl" + ) + + onnx_input_name = os.getenv("ONNX_INPUT_NAME", "input") + onnx_output_name = os.getenv("ONNX_OUTPUT_NAME", "output") + + +logger.info(f"{PlatformConfigurations.__name__}: {PlatformConfigurations.__dict__}") +logger.info(f"{CacheConfigurations.__name__}: {CacheConfigurations.__dict__}") +logger.info(f"{RedisCacheConfigurations.__name__}: {RedisCacheConfigurations.__dict__}") +logger.info(f"{APIConfigurations.__name__}: {APIConfigurations.__dict__}") +logger.info(f"{ModelConfigurations.__name__}: {ModelConfigurations.__dict__}") diff --git a/MLOps/serving_patterns/data_cache_pattern/src/constants.py b/MLOps/serving_patterns/data_cache_pattern/src/constants.py new file mode 100644 index 0000000..23a0075 --- /dev/null +++ b/MLOps/serving_patterns/data_cache_pattern/src/constants.py @@ -0,0 +1,35 @@ +import enum + + +class PLATFORM_ENUM(enum.Enum): + DOCKER = "docker" + DOCKER_COMPOSE = "docker_compose" + KUBERNETES = "kubernetes" + TEST = "test" + + @staticmethod + def has_value(item): + return item in [v.value for v in PLATFORM_ENUM.__members__.values()] + + +def constant(f): + def fset(self, value): + raise TypeError + + def fget(self): + return f() + + return 
property(fget, fset) + + +class _Constants(object): + @constant + def REDIS_INCREMENTS(): + return "increments" + + @constant + def REDIS_QUEUE(): + return "redis_queue" + + +CONSTANTS = _Constants() diff --git a/MLOps/serving_patterns/data_cache_pattern/src/ml/__init__.py b/MLOps/serving_patterns/data_cache_pattern/src/ml/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/MLOps/serving_patterns/data_cache_pattern/src/ml/prediction.py b/MLOps/serving_patterns/data_cache_pattern/src/ml/prediction.py new file mode 100644 index 0000000..2e6f8d0 --- /dev/null +++ b/MLOps/serving_patterns/data_cache_pattern/src/ml/prediction.py @@ -0,0 +1,119 @@ +import json +import os +from logging import getLogger +from typing import List + +import grpc +import joblib +import numpy as np +from fastapi import BackgroundTasks +from PIL import Image +from pydantic import BaseModel +from src.app.backend import background_job +from src.configurations import ModelConfigurations +from src.ml.transformers import PytorchImagePreprocessTransformer, SoftmaxTransformer +from src.proto import onnx_ml_pb2, predict_pb2, prediction_service_pb2_grpc + +logger = getLogger(__name__) + + +class Data(BaseModel): + data: str = "0000" + + +class Classifier(object): + def __init__( + self, + preprocess_transformer_path: str = "/data_cache_pattern/models/preprocess_transformer.pkl", + softmax_transformer_path: str = "/data_cache_pattern/models/softmax_transformer.pkl", + label_path: str = "/data_cache_pattern/data/image_net_labels.json", + serving_address: str = "localhost:50051", + onnx_input_name: str = "input", + onnx_output_name: str = "output", + ): + self.preprocess_transformer_path: str = preprocess_transformer_path + self.softmax_transformer_path: str = softmax_transformer_path + self.preprocess_transformer: PytorchImagePreprocessTransformer = None + self.softmax_transformer: SoftmaxTransformer = None + + self.serving_address = serving_address + self.channel = 
grpc.insecure_channel(self.serving_address) + self.stub = prediction_service_pb2_grpc.PredictionServiceStub(self.channel) + + self.label_path = label_path + self.label: List[str] = [] + + self.onnx_input_name: str = onnx_input_name + self.onnx_output_name: str = onnx_output_name + + self.load_model() + self.load_label() + + def load_model(self): + logger.info(f"load preprocess in {self.preprocess_transformer_path}") + self.preprocess_transformer = joblib.load(self.preprocess_transformer_path) + logger.info(f"initialized preprocess") + + logger.info(f"load postprocess in {self.softmax_transformer_path}") + self.softmax_transformer = joblib.load(self.softmax_transformer_path) + logger.info(f"initialized postprocess") + + def load_label(self): + logger.info(f"load label in {self.label_path}") + with open(self.label_path, "r") as f: + self.label = json.load(f) + logger.info(f"label: {self.label}") + + def predict( + self, + data: Data, + background_tasks: BackgroundTasks, + ) -> List[float]: + cache_data = background_job.get_data_redis(key=data.data) + if cache_data is None: + logger.info(f"registering cache: {data.data}") + image = Image.open(os.path.join("data/", f"{data.data}.jpg")) + preprocessed = self.preprocess_transformer.transform(image) + background_job.save_data_job( + data=preprocessed.tolist(), item_id=data.data, background_tasks=background_tasks + ) + else: + logger.info(f"cache hit: {data.data}") + preprocessed = np.array(cache_data).astype(np.float32) + + input_tensor = onnx_ml_pb2.TensorProto() + input_tensor.dims.extend(preprocessed.shape) + input_tensor.data_type = 1 + input_tensor.raw_data = preprocessed.tobytes() + + request_message = predict_pb2.PredictRequest() + request_message.inputs[self.onnx_input_name].data_type = input_tensor.data_type + request_message.inputs[self.onnx_input_name].dims.extend(preprocessed.shape) + request_message.inputs[self.onnx_input_name].raw_data = input_tensor.raw_data + + response = self.stub.Predict(request_message) 
+ output = np.frombuffer(response.outputs[self.onnx_output_name].raw_data, dtype=np.float32) + + softmax = self.softmax_transformer.transform(output).tolist() + + logger.info(f"predict proba {softmax}") + return softmax + + def predict_label( + self, + data: Data, + background_tasks: BackgroundTasks, + ) -> str: + softmax = self.predict(data=data, background_tasks=background_tasks) + argmax = int(np.argmax(np.array(softmax)[0])) + return self.label[argmax] + + +classifier = Classifier( + preprocess_transformer_path=ModelConfigurations().preprocess_transformer_path, + softmax_transformer_path=ModelConfigurations().softmax_transformer_path, + label_path=ModelConfigurations().label_path, + serving_address=f"{ModelConfigurations.api_address}:{ModelConfigurations.grpc_port}", + onnx_input_name=ModelConfigurations().onnx_input_name, + onnx_output_name=ModelConfigurations().onnx_output_name, +) diff --git a/MLOps/serving_patterns/data_cache_pattern/src/ml/transformers.py b/MLOps/serving_patterns/data_cache_pattern/src/ml/transformers.py new file mode 100644 index 0000000..304e9be --- /dev/null +++ b/MLOps/serving_patterns/data_cache_pattern/src/ml/transformers.py @@ -0,0 +1,56 @@ +from typing import List, Tuple, Union + +import numpy as np +from PIL import Image +from sklearn.base import BaseEstimator, TransformerMixin + + +class PytorchImagePreprocessTransformer(BaseEstimator, TransformerMixin): + def __init__( + self, + image_size: Tuple[int, int] = (224, 224), + prediction_shape: Tuple[int, int, int, int] = (1, 3, 224, 224), + mean_vec: List[float] = [0.485, 0.456, 0.406], + stddev_vec: List[float] = [0.229, 0.224, 0.225], + ): + self.image_size = image_size + self.prediction_shape = prediction_shape + self.mean_vec = mean_vec + self.stddev_vec = stddev_vec + + def fit(self, X, y=None): + return self + + def transform(self, X: Union[Image.Image, np.ndarray]) -> np.ndarray: + if isinstance(X, np.ndarray): + dim_0 = (3,) + self.image_size + dim_1 = self.image_size + (3,) 
+ if X.shape != dim_0 and X.shape != dim_1: + raise ValueError(f"resize to image_size {self.image_size} beforehand for numpy array") + else: + X = np.array(X.resize(self.image_size)) + + image_data = X.transpose(2, 0, 1).astype(np.float32) + mean_vec = np.array(self.mean_vec) + stddev_vec = np.array(self.stddev_vec) + norm_image_data = np.zeros(image_data.shape).astype(np.float32) + for i in range(image_data.shape[0]): + norm_image_data[i, :, :] = (image_data[i, :, :] / 255 - mean_vec[i]) / stddev_vec[i] + norm_image_data = norm_image_data.reshape(self.prediction_shape).astype(np.float32) + return norm_image_data + + +class SoftmaxTransformer(BaseEstimator, TransformerMixin): + def __init__(self): + pass + + def fit(self, X, y=None): + return self + + def transform(self, X: Union[np.ndarray, List[float], List[List[float]]]) -> np.ndarray: + if isinstance(X, List): + X = np.array(X) + x = X.reshape(-1) + e_x = np.exp(x - np.max(x)) + result = np.array([e_x / e_x.sum(axis=0)]) + return result diff --git a/MLOps/serving_patterns/data_cache_pattern/src/proto/onnx-ml.proto b/MLOps/serving_patterns/data_cache_pattern/src/proto/onnx-ml.proto new file mode 100644 index 0000000..57ee68d --- /dev/null +++ b/MLOps/serving_patterns/data_cache_pattern/src/proto/onnx-ml.proto @@ -0,0 +1,636 @@ +// +// WARNING: This file is automatically generated! Please edit onnx.in.proto. +// + + +// Copyright (c) ONNX Project Contributors. +// Licensed under the MIT license. + +syntax = "proto2"; + +package onnx; + +// Overview +// +// ONNX is an open specification that is comprised of the following components: +// +// 1) A definition of an extensible computation graph model. +// 2) Definitions of standard data types. +// 3) Definitions of built-in operators. +// +// This document describes the syntax of models and their computation graphs, +// as well as the standard data types. Together, they are referred to as the ONNX +// Intermediate Representation, or 'IR' for short. 
+// +// The normative semantic specification of the ONNX IR is found in docs/IR.md. +// Definitions of the built-in neural network operators may be found in docs/Operators.md. +// Definitions of the built-in classical machine learning operators may be found in +// docs/Operators-ml.md. + +// Notes +// +// Release +// +// We are still in the very early stage of defining ONNX. The current +// version of ONNX is a starting point. While we are actively working +// towards a complete spec, we would like to get the community involved +// by sharing our working version of ONNX. +// +// Protobuf compatibility +// +// To simplify framework compatibility, ONNX is defined using the subset of protobuf +// that is compatible with both protobuf v2 and v3. This means that we do not use any +// protobuf features that are only available in one of the two versions. +// +// Here are the most notable contortions we have to carry out to work around +// these limitations: +// +// - No 'map' (added protobuf 3.0). We instead represent mappings as lists +// of key-value pairs, where order does not matter and duplicates +// are not allowed. + + +// Versioning +// +// ONNX versioning is specified in docs/IR.md and elaborated on in docs/Versioning.md +// +// To be compatible with both proto2 and proto3, we will use a version number +// that is not defined by the default value but an explicit enum number. +enum Version { + // proto3 requires the first enum value to be zero. + // We add this just to appease the compiler. + _START_VERSION = 0; + // The version field is always serialized and we will use it to store the + // version that the graph is generated from. This helps us set up version + // control. + // For the IR, we are using simple numbers starting with 0x00000001, + // which was the version we published on Oct 10, 2017. 
+ IR_VERSION_2017_10_10 = 0x0000000000000001; + + // IR_VERSION 2 published on Oct 30, 2017 + // - Added type discriminator to AttributeProto to support proto3 users + IR_VERSION_2017_10_30 = 0x0000000000000002; + + // IR VERSION 3 published on Nov 3, 2017 + // - For operator versioning: + // - Added new message OperatorSetIdProto + // - Added opset_import in ModelProto + // - For vendor extensions, added domain in NodeProto + IR_VERSION_2017_11_3 = 0x0000000000000003; + + // IR VERSION 4 published on Jan 22, 2019 + // - Relax constraint that initializers should be a subset of graph inputs + // - Add type BFLOAT16 + IR_VERSION_2019_1_22 = 0x0000000000000004; + + // IR VERSION 5 published on March 18, 2019 + // - Add message TensorAnnotation. + // - Add quantization annotation in GraphProto to map tensor with its scale and zero point quantization parameters. + IR_VERSION_2019_3_18 = 0x0000000000000005; + + // IR VERSION 6 published on Sep 19, 2019 + // - Add support for sparse tensor constants stored in model. + // - Add message SparseTensorProto + // - Add sparse initializers + IR_VERSION = 0x0000000000000006; +} + +// Attributes +// +// A named attribute containing either singular float, integer, string, graph, +// and tensor values, or repeated float, integer, string, graph, and tensor values. +// An AttributeProto MUST contain the name field, and *only one* of the +// following content fields, effectively enforcing a C/C++ union equivalent. +message AttributeProto { + + // Note: this enum is structurally identical to the OpSchema::AttrType + // enum defined in schema.h. If you rev one, you likely need to rev the other. + enum AttributeType { + UNDEFINED = 0; + FLOAT = 1; + INT = 2; + STRING = 3; + TENSOR = 4; + GRAPH = 5; + SPARSE_TENSOR = 11; + + FLOATS = 6; + INTS = 7; + STRINGS = 8; + TENSORS = 9; + GRAPHS = 10; + SPARSE_TENSORS = 12; + } + + // The name field MUST be present for this version of the IR. 
+ optional string name = 1; // namespace Attribute + + // if ref_attr_name is not empty, ref_attr_name is the attribute name in parent function. + // In this case, this AttributeProto does not contain data, and it's a reference of attribute + // in parent scope. + // NOTE: This should ONLY be used in function (sub-graph). It's invalid to be used in main graph. + optional string ref_attr_name = 21; + + // A human-readable documentation for this attribute. Markdown is allowed. + optional string doc_string = 13; + + // The type field MUST be present for this version of the IR. + // For 0.0.1 versions of the IR, this field was not defined, and + // implementations needed to use has_field heuristics to determine + // which value field was in use. For IR_VERSION 0.0.2 or later, this + // field MUST be set and match the f|i|s|t|... field in use. This + // change was made to accommodate proto3 implementations. + optional AttributeType type = 20; // discriminator that indicates which field below is in use + + // Exactly ONE of the following fields must be present for this version of the IR + optional float f = 2; // float + optional int64 i = 3; // int + optional bytes s = 4; // UTF-8 string + optional TensorProto t = 5; // tensor value + optional GraphProto g = 6; // graph + optional SparseTensorProto sparse_tensor = 22; // sparse tensor value + // Do not use field below, it's deprecated. + // optional ValueProto v = 12; // value - subsumes everything but graph + + repeated float floats = 7; // list of floats + repeated int64 ints = 8; // list of ints + repeated bytes strings = 9; // list of UTF-8 strings + repeated TensorProto tensors = 10; // list of tensors + repeated GraphProto graphs = 11; // list of graph + repeated SparseTensorProto sparse_tensors = 23; // list of sparse tensors +} + +// Defines information on value, including the name, the type, and +// the shape of the value. +message ValueInfoProto { + // This field MUST be present in this version of the IR. 
+ optional string name = 1; // namespace Value + // This field MUST be present in this version of the IR for + // inputs and outputs of the top-level graph. + optional TypeProto type = 2; + // A human-readable documentation for this value. Markdown is allowed. + optional string doc_string = 3; +} + +// Nodes +// +// Computation graphs are made up of a DAG of nodes, which represent what is +// commonly called a "layer" or "pipeline stage" in machine learning frameworks. +// +// For example, it can be a node of type "Conv" that takes in an image, a filter +// tensor and a bias tensor, and produces the convolved output. +message NodeProto { + repeated string input = 1; // namespace Value + repeated string output = 2; // namespace Value + + // An optional identifier for this node in a graph. + // This field MAY be absent in this version of the IR. + optional string name = 3; // namespace Node + + // The symbolic identifier of the Operator to execute. + optional string op_type = 4; // namespace Operator + // The domain of the OperatorSet that specifies the operator named by op_type. + optional string domain = 7; // namespace Domain + + // Additional named attributes. + repeated AttributeProto attribute = 5; + + // A human-readable documentation for this node. Markdown is allowed. + optional string doc_string = 6; +} + +// Models +// +// ModelProto is a top-level file/container format for bundling a ML model and +// associating its computation graph with metadata. +// +// The semantics of the model are described by the associated GraphProto. +message ModelProto { + // The version of the IR this model targets. See Version enum above. + // This field MUST be present. + optional int64 ir_version = 1; + + // The OperatorSets this model relies on. + // All ModelProtos MUST have at least one entry that + // specifies which version of the ONNX OperatorSet is + // being imported. 
+ // + // All nodes in the ModelProto's graph will bind against the operator + // with the same-domain/same-op_type operator with the HIGHEST version + // in the referenced operator sets. + repeated OperatorSetIdProto opset_import = 8; + + // The name of the framework or tool used to generate this model. + // This field SHOULD be present to indicate which implementation/tool/framework + // emitted the model. + optional string producer_name = 2; + + // The version of the framework or tool used to generate this model. + // This field SHOULD be present to indicate which implementation/tool/framework + // emitted the model. + optional string producer_version = 3; + + // Domain name of the model. + // We use reverse domain names as name space indicators. For example: + // `com.facebook.fair` or `com.microsoft.cognitiveservices` + // + // Together with `model_version` and GraphProto.name, this forms the unique identity of + // the graph. + optional string domain = 4; + + // The version of the graph encoded. See Version enum below. + optional int64 model_version = 5; + + // A human-readable documentation for this model. Markdown is allowed. + optional string doc_string = 6; + + // The parameterized graph that is evaluated to execute the model. + optional GraphProto graph = 7; + + // kezhan: This field is not in ONNX, and will be pushed into ONNX with good use cases in microsoft. + repeated FunctionProto functions = 100; + + // Named metadata values; keys should be distinct. + repeated StringStringEntryProto metadata_props = 14; +}; + +// StringStringEntryProto follows the pattern for cross-proto-version maps. +// See https://developers.google.com/protocol-buffers/docs/proto3#maps +message StringStringEntryProto { + optional string key = 1; + optional string value= 2; +}; + +message TensorAnnotation { + optional string tensor_name = 1; + // pairs to annotate tensor specified by above. + // The keys used in the mapping below must be pre-defined in ONNX spec. 
+ // For example, for 8-bit linear quantization case, 'SCALE_TENSOR', 'ZERO_POINT_TENSOR' will be pre-defined as + // quantization parameter keys. + repeated StringStringEntryProto quant_parameter_tensor_names = 2; +} + +// Graphs +// +// A graph defines the computational logic of a model and is comprised of a parameterized +// list of nodes that form a directed acyclic graph based on their inputs and outputs. +// This is the equivalent of the "network" or "graph" in many deep learning +// frameworks. +message GraphProto { + // The nodes in the graph, sorted topologically. + repeated NodeProto node = 1; + + // The name of the graph. + optional string name = 2; // namespace Graph + + // A list of named tensor values, used to specify constant inputs of the graph. + // Each TensorProto entry must have a distinct name (within the list) that + // MAY also appear in the input list. + repeated TensorProto initializer = 5; + + // Initializers (see above) stored in sparse format. + repeated SparseTensorProto sparse_initializer = 15; + + // A human-readable documentation for this graph. Markdown is allowed. + optional string doc_string = 10; + + // The inputs and outputs of the graph. + repeated ValueInfoProto input = 11; + repeated ValueInfoProto output = 12; + + // Information for the values in the graph. The ValueInfoProto.name's + // must be distinct. It is optional for a value to appear in value_info list. + repeated ValueInfoProto value_info = 13; + + // This field carries information to indicate the mapping among a tensor and its + // quantization parameter tensors. For example: + // For tensor 'a', it may have {'SCALE_TENSOR', 'a_scale'} and {'ZERO_POINT_TENSOR', 'a_zero_point'} annotated, + // which means, tensor 'a_scale' and tensor 'a_zero_point' are scale and zero point of tensor 'a' in the model. + repeated TensorAnnotation quantization_annotation = 14; + + // DO NOT USE the following fields, they were deprecated from earlier versions. 
+ // repeated string input = 3; + // repeated string output = 4; + // optional int64 ir_version = 6; + // optional int64 producer_version = 7; + // optional string producer_tag = 8; + // optional string domain = 9; +} + +// Tensors +// +// A serialized tensor value. +message TensorProto { + enum DataType { + UNDEFINED = 0; + // Basic types. + FLOAT = 1; // float + UINT8 = 2; // uint8_t + INT8 = 3; // int8_t + UINT16 = 4; // uint16_t + INT16 = 5; // int16_t + INT32 = 6; // int32_t + INT64 = 7; // int64_t + STRING = 8; // string + BOOL = 9; // bool + + // IEEE754 half-precision floating-point format (16 bits wide). + // This format has 1 sign bit, 5 exponent bits, and 10 mantissa bits. + FLOAT16 = 10; + + DOUBLE = 11; + UINT32 = 12; + UINT64 = 13; + COMPLEX64 = 14; // complex with float32 real and imaginary components + COMPLEX128 = 15; // complex with float64 real and imaginary components + + // Non-IEEE floating-point format based on IEEE754 single-precision + // floating-point number truncated to 16 bits. + // This format has 1 sign bit, 8 exponent bits, and 7 mantissa bits. + BFLOAT16 = 16; + + // Future extensions go here. + } + + // The shape of the tensor. + repeated int64 dims = 1; + + // The data type of the tensor. + // This field MUST have a valid TensorProto.DataType value + optional int32 data_type = 2; + + // For very large tensors, we may want to store them in chunks, in which + // case the following fields will specify the segment that is stored in + // the current TensorProto. + message Segment { + optional int64 begin = 1; + optional int64 end = 2; + } + optional Segment segment = 3; + + // Tensor content must be organized in row-major order. + // + // Depending on the data_type field, exactly one of the fields below with + // name ending in _data is used to store the elements of the tensor. 
+ + // For float and complex64 values + // Complex64 tensors are encoded as a single array of floats, + // with the real components appearing in odd numbered positions, + // and the corresponding imaginary component apparing in the + // subsequent even numbered position. (e.g., [1.0 + 2.0i, 3.0 + 4.0i] + // is encoded as [1.0, 2.0 ,3.0 ,4.0] + // When this field is present, the data_type field MUST be FLOAT or COMPLEX64. + repeated float float_data = 4 [packed = true]; + + // For int32, uint8, int8, uint16, int16, bool, and float16 values + // float16 values must be bit-wise converted to an uint16_t prior + // to writing to the buffer. + // When this field is present, the data_type field MUST be + // INT32, INT16, INT8, UINT16, UINT8, BOOL, or FLOAT16 + repeated int32 int32_data = 5 [packed = true]; + + // For strings. + // Each element of string_data is a UTF-8 encoded Unicode + // string. No trailing null, no leading BOM. The protobuf "string" + // scalar type is not used to match ML community conventions. + // When this field is present, the data_type field MUST be STRING + repeated bytes string_data = 6; + + // For int64. + // When this field is present, the data_type field MUST be INT64 + repeated int64 int64_data = 7 [packed = true]; + + // Optionally, a name for the tensor. + optional string name = 8; // namespace Value + + // A human-readable documentation for this tensor. Markdown is allowed. + optional string doc_string = 12; + + // Serializations can either use one of the fields above, or use this + // raw bytes field. The only exception is the string case, where one is + // required to store the content in the repeated bytes string_data field. + // + // When this raw_data field is used to store tensor value, elements MUST + // be stored in as fixed-width, little-endian order. + // Floating-point data types MUST be stored in IEEE 754 format. + // Complex64 elements must be written as two consecutive FLOAT values, real component first. 
+ // Complex128 elements must be written as two consecutive DOUBLE values, real component first. + // Boolean type MUST be written one byte per tensor element (00000001 for true, 00000000 for false). + // + // Note: the advantage of specific field rather than the raw_data field is + // that in some cases (e.g. int data), protobuf does a better packing via + // variable length storage, and may lead to smaller binary footprint. + // When this field is present, the data_type field MUST NOT be STRING or UNDEFINED + optional bytes raw_data = 9; + + // Data can be stored inside the protobuf file using type-specific fields or raw_data. + // Alternatively, raw bytes data can be stored in an external file, using the external_data field. + // external_data stores key-value pairs describing data location. Recognized keys are: + // - "location" (required) - POSIX filesystem path relative to the directory where the ONNX + // protobuf model was stored + // - "offset" (optional) - position of byte at which stored data begins. Integer stored as string. + // Offset values SHOULD be multiples 4096 (page size) to enable mmap support. + // - "length" (optional) - number of bytes containing data. Integer stored as string. + // - "checksum" (optional) - SHA1 digest of file specified in under 'location' key. + repeated StringStringEntryProto external_data = 13; + + // Location of the data for this tensor. MUST be one of: + // - DEFAULT - data stored inside the protobuf message. Data is stored in raw_data (if set) otherwise in type-specified field. + // - EXTERNAL - data stored in an external location as described by external_data field. + enum DataLocation { + DEFAULT = 0; + EXTERNAL = 1; + } + + // If value not set, data is stored in raw_data (if set) otherwise in type-specified field. 
+ optional DataLocation data_location = 14; + + // For double + // Complex128 tensors are encoded as a single array of doubles, + // with the real components appearing in odd numbered positions, + // and the corresponding imaginary component apparing in the + // subsequent even numbered position. (e.g., [1.0 + 2.0i, 3.0 + 4.0i] + // is encoded as [1.0, 2.0 ,3.0 ,4.0] + // When this field is present, the data_type field MUST be DOUBLE or COMPLEX128 + repeated double double_data = 10 [packed = true]; + + // For uint64 and uint32 values + // When this field is present, the data_type field MUST be + // UINT32 or UINT64 + repeated uint64 uint64_data = 11 [packed = true]; +} + +// A serialized sparse-tensor value +message SparseTensorProto { + // The sequence of non-default values are encoded as a tensor of shape [NNZ]. + // The default-value is zero for numeric tensors, and empty-string for string tensors. + optional TensorProto values = 1; + + // The indices of the non-default values, which may be stored in one of two formats. + // (a) Indices can be a tensor of shape [NNZ, rank] with the [i,j]-th value + // corresponding to the j-th index of the i-th value (in the values tensor). + // (b) Indices can be a tensor of shape [NNZ], in which case the i-th value + // must be the linearized-index of the i-th value (in the values tensor). + // The linearized-index can be converted into an index tuple (k_1,...,k_rank) + // using the shape provided below. + // The indices must appear in ascending order without duplication. + // In the first format, the ordering is lexicographic-ordering: + // e.g., index-value [1,4] must appear before [2,1] + optional TensorProto indices = 2; + + // The shape of the underlying dense-tensor: [dim_1, dim_2, ... dim_rank] + repeated int64 dims = 3; +} + +// Defines a tensor shape. A dimension can be either an integer value +// or a symbolic variable. A symbolic variable represents an unknown +// dimension. 
+message TensorShapeProto { + message Dimension { + oneof value { + int64 dim_value = 1; + string dim_param = 2; // namespace Shape + }; + // Standard denotation can optionally be used to denote tensor + // dimensions with standard semantic descriptions to ensure + // that operations are applied to the correct axis of a tensor. + // Refer to https://github.com/onnx/onnx/blob/master/docs/DimensionDenotation.md#denotation-definition + // for pre-defined dimension denotations. + optional string denotation = 3; + }; + repeated Dimension dim = 1; +} + +// Types +// +// The standard ONNX data types. +message TypeProto { + + message Tensor { + // This field MUST NOT have the value of UNDEFINED + // This field MUST have a valid TensorProto.DataType value + // This field MUST be present for this version of the IR. + optional int32 elem_type = 1; + optional TensorShapeProto shape = 2; + } + + // repeated T + message Sequence { + // The type and optional shape of each element of the sequence. + // This field MUST be present for this version of the IR. + optional TypeProto elem_type = 1; + }; + + // map + message Map { + // This field MUST have a valid TensorProto.DataType value + // This field MUST be present for this version of the IR. + // This field MUST refer to an integral type ([U]INT{8|16|32|64}) or STRING + optional int32 key_type = 1; + // This field MUST be present for this version of the IR. + optional TypeProto value_type = 2; + }; + + + message SparseTensor { + // This field MUST NOT have the value of UNDEFINED + // This field MUST have a valid TensorProto.DataType value + // This field MUST be present for this version of the IR. + optional int32 elem_type = 1; + optional TensorShapeProto shape = 2; + } + + message Opaque { + // When missing, the domain is the same as the model's. + optional string domain = 1; + // The name is optional but significant when provided. 
+ optional string name = 2; + // parameters that help defining the type + // DEPRECATED do not use. + // repeated TypeProto parameters = 3; + } + + + oneof value { + // The type of a tensor. + Tensor tensor_type = 1; + + // NOTE: DNN-only implementations of ONNX MAY elect to not support non-tensor values + // as input and output to graphs and nodes. These types are needed to naturally + // support classical ML operators. DNN operators SHOULD restrict their input + // and output types to tensors. + + // The type of a sequence. + Sequence sequence_type = 4; + + // The type of a map. + Map map_type = 5; + + + SparseTensor sparse_tensor_type = 8; + + Opaque opaque_type = 7; + + + } + + // An optional denotation can be used to denote the whole + // type with a standard semantic description as to what is + // stored inside. Refer to https://github.com/onnx/onnx/blob/master/docs/TypeDenotation.md#type-denotation-definition + // for pre-defined type denotations. + optional string denotation = 6; +} + +// Operator Sets +// +// OperatorSets are uniquely identified by a (domain, opset_version) pair. +message OperatorSetIdProto { + // The domain of the operator set being identified. + // The empty string ("") or absence of this field implies the operator + // set that is defined as part of the ONNX specification. + // This field MUST be present in this version of the IR when referring to any other operator set. + optional string domain = 1; + + // The version of the operator set being identified. + // This field MUST be present in this version of the IR. + optional int64 version = 2; +} + +// Operator/function status. +enum OperatorStatus { + EXPERIMENTAL = 0; + STABLE = 1; +} + +message FunctionProto { + // The name of the function, similar usage of op_type in OperatorProto. + optional string name = 1; + + // The first version of a function set which contains this function. 
+ // When there's any breaking change for this function, the function set + // contains the function needs to bump its version, and since_version of + // the updated function will be changed to the updated function set version. + optional int64 since_version = 2; + + // This field indicates whether the syntax, semantics, or presence + // of this function is in an experimental or stable stage. Once an + // function is published as STABLE, its syntax and semantics MUST NOT + // change in subsequent versions of the operator set. + // When a function is published as EXPERIMENTAL, the syntax and semantics + // of the function MAY change across operator set versions. + // Functions "become" stable by deprecating the experimental version and + // introducing a new stable function with the same name. + optional OperatorStatus status = 3; + + // The inputs and outputs of the function. + repeated string input = 4; + repeated string output = 5; + + // The attributes of the function. + repeated string attribute= 6; + + // The nodes in the function. + repeated NodeProto node = 7; + // A human-readable documentation for this function. Markdown is allowed. + optional string doc_string = 8; +} \ No newline at end of file diff --git a/MLOps/serving_patterns/data_cache_pattern/src/proto/onnx_ml_pb2.py b/MLOps/serving_patterns/data_cache_pattern/src/proto/onnx_ml_pb2.py new file mode 100644 index 0000000..73e1052 --- /dev/null +++ b/MLOps/serving_patterns/data_cache_pattern/src/proto/onnx_ml_pb2.py @@ -0,0 +1,3056 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: onnx-ml.proto +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import enum_type_wrapper + +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +DESCRIPTOR = _descriptor.FileDescriptor( + name="onnx-ml.proto", + package="onnx", + syntax="proto2", + serialized_options=None, + create_key=_descriptor._internal_create_key, + serialized_pb=b'\n\ronnx-ml.proto\x12\x04onnx"\xe8\x04\n\x0e\x41ttributeProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x15\n\rref_attr_name\x18\x15 \x01(\t\x12\x12\n\ndoc_string\x18\r \x01(\t\x12\x30\n\x04type\x18\x14 \x01(\x0e\x32".onnx.AttributeProto.AttributeType\x12\t\n\x01\x66\x18\x02 \x01(\x02\x12\t\n\x01i\x18\x03 \x01(\x03\x12\t\n\x01s\x18\x04 \x01(\x0c\x12\x1c\n\x01t\x18\x05 \x01(\x0b\x32\x11.onnx.TensorProto\x12\x1b\n\x01g\x18\x06 \x01(\x0b\x32\x10.onnx.GraphProto\x12.\n\rsparse_tensor\x18\x16 \x01(\x0b\x32\x17.onnx.SparseTensorProto\x12\x0e\n\x06\x66loats\x18\x07 \x03(\x02\x12\x0c\n\x04ints\x18\x08 \x03(\x03\x12\x0f\n\x07strings\x18\t \x03(\x0c\x12"\n\x07tensors\x18\n \x03(\x0b\x32\x11.onnx.TensorProto\x12 \n\x06graphs\x18\x0b \x03(\x0b\x32\x10.onnx.GraphProto\x12/\n\x0esparse_tensors\x18\x17 \x03(\x0b\x32\x17.onnx.SparseTensorProto"\xb8\x01\n\rAttributeType\x12\r\n\tUNDEFINED\x10\x00\x12\t\n\x05\x46LOAT\x10\x01\x12\x07\n\x03INT\x10\x02\x12\n\n\x06STRING\x10\x03\x12\n\n\x06TENSOR\x10\x04\x12\t\n\x05GRAPH\x10\x05\x12\x11\n\rSPARSE_TENSOR\x10\x0b\x12\n\n\x06\x46LOATS\x10\x06\x12\x08\n\x04INTS\x10\x07\x12\x0b\n\x07STRINGS\x10\x08\x12\x0b\n\x07TENSORS\x10\t\x12\n\n\x06GRAPHS\x10\n\x12\x12\n\x0eSPARSE_TENSORS\x10\x0c"Q\n\x0eValueInfoProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x1d\n\x04type\x18\x02 \x01(\x0b\x32\x0f.onnx.TypeProto\x12\x12\n\ndoc_string\x18\x03 
\x01(\t"\x96\x01\n\tNodeProto\x12\r\n\x05input\x18\x01 \x03(\t\x12\x0e\n\x06output\x18\x02 \x03(\t\x12\x0c\n\x04name\x18\x03 \x01(\t\x12\x0f\n\x07op_type\x18\x04 \x01(\t\x12\x0e\n\x06\x64omain\x18\x07 \x01(\t\x12\'\n\tattribute\x18\x05 \x03(\x0b\x32\x14.onnx.AttributeProto\x12\x12\n\ndoc_string\x18\x06 \x01(\t"\xbb\x02\n\nModelProto\x12\x12\n\nir_version\x18\x01 \x01(\x03\x12.\n\x0copset_import\x18\x08 \x03(\x0b\x32\x18.onnx.OperatorSetIdProto\x12\x15\n\rproducer_name\x18\x02 \x01(\t\x12\x18\n\x10producer_version\x18\x03 \x01(\t\x12\x0e\n\x06\x64omain\x18\x04 \x01(\t\x12\x15\n\rmodel_version\x18\x05 \x01(\x03\x12\x12\n\ndoc_string\x18\x06 \x01(\t\x12\x1f\n\x05graph\x18\x07 \x01(\x0b\x32\x10.onnx.GraphProto\x12&\n\tfunctions\x18\x64 \x03(\x0b\x32\x13.onnx.FunctionProto\x12\x34\n\x0emetadata_props\x18\x0e \x03(\x0b\x32\x1c.onnx.StringStringEntryProto"4\n\x16StringStringEntryProto\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t"k\n\x10TensorAnnotation\x12\x13\n\x0btensor_name\x18\x01 \x01(\t\x12\x42\n\x1cquant_parameter_tensor_names\x18\x02 \x03(\x0b\x32\x1c.onnx.StringStringEntryProto"\xd8\x02\n\nGraphProto\x12\x1d\n\x04node\x18\x01 \x03(\x0b\x32\x0f.onnx.NodeProto\x12\x0c\n\x04name\x18\x02 \x01(\t\x12&\n\x0binitializer\x18\x05 \x03(\x0b\x32\x11.onnx.TensorProto\x12\x33\n\x12sparse_initializer\x18\x0f \x03(\x0b\x32\x17.onnx.SparseTensorProto\x12\x12\n\ndoc_string\x18\n \x01(\t\x12#\n\x05input\x18\x0b \x03(\x0b\x32\x14.onnx.ValueInfoProto\x12$\n\x06output\x18\x0c \x03(\x0b\x32\x14.onnx.ValueInfoProto\x12(\n\nvalue_info\x18\r \x03(\x0b\x32\x14.onnx.ValueInfoProto\x12\x37\n\x17quantization_annotation\x18\x0e \x03(\x0b\x32\x16.onnx.TensorAnnotation"\xb8\x05\n\x0bTensorProto\x12\x0c\n\x04\x64ims\x18\x01 \x03(\x03\x12\x11\n\tdata_type\x18\x02 \x01(\x05\x12*\n\x07segment\x18\x03 \x01(\x0b\x32\x19.onnx.TensorProto.Segment\x12\x16\n\nfloat_data\x18\x04 \x03(\x02\x42\x02\x10\x01\x12\x16\n\nint32_data\x18\x05 
\x03(\x05\x42\x02\x10\x01\x12\x13\n\x0bstring_data\x18\x06 \x03(\x0c\x12\x16\n\nint64_data\x18\x07 \x03(\x03\x42\x02\x10\x01\x12\x0c\n\x04name\x18\x08 \x01(\t\x12\x12\n\ndoc_string\x18\x0c \x01(\t\x12\x10\n\x08raw_data\x18\t \x01(\x0c\x12\x33\n\rexternal_data\x18\r \x03(\x0b\x32\x1c.onnx.StringStringEntryProto\x12\x35\n\rdata_location\x18\x0e \x01(\x0e\x32\x1e.onnx.TensorProto.DataLocation\x12\x17\n\x0b\x64ouble_data\x18\n \x03(\x01\x42\x02\x10\x01\x12\x17\n\x0buint64_data\x18\x0b \x03(\x04\x42\x02\x10\x01\x1a%\n\x07Segment\x12\r\n\x05\x62\x65gin\x18\x01 \x01(\x03\x12\x0b\n\x03\x65nd\x18\x02 \x01(\x03"\xda\x01\n\x08\x44\x61taType\x12\r\n\tUNDEFINED\x10\x00\x12\t\n\x05\x46LOAT\x10\x01\x12\t\n\x05UINT8\x10\x02\x12\x08\n\x04INT8\x10\x03\x12\n\n\x06UINT16\x10\x04\x12\t\n\x05INT16\x10\x05\x12\t\n\x05INT32\x10\x06\x12\t\n\x05INT64\x10\x07\x12\n\n\x06STRING\x10\x08\x12\x08\n\x04\x42OOL\x10\t\x12\x0b\n\x07\x46LOAT16\x10\n\x12\n\n\x06\x44OUBLE\x10\x0b\x12\n\n\x06UINT32\x10\x0c\x12\n\n\x06UINT64\x10\r\x12\r\n\tCOMPLEX64\x10\x0e\x12\x0e\n\nCOMPLEX128\x10\x0f\x12\x0c\n\x08\x42\x46LOAT16\x10\x10")\n\x0c\x44\x61taLocation\x12\x0b\n\x07\x44\x45\x46\x41ULT\x10\x00\x12\x0c\n\x08\x45XTERNAL\x10\x01"h\n\x11SparseTensorProto\x12!\n\x06values\x18\x01 \x01(\x0b\x32\x11.onnx.TensorProto\x12"\n\x07indices\x18\x02 \x01(\x0b\x32\x11.onnx.TensorProto\x12\x0c\n\x04\x64ims\x18\x03 \x03(\x03"\x95\x01\n\x10TensorShapeProto\x12-\n\x03\x64im\x18\x01 \x03(\x0b\x32 .onnx.TensorShapeProto.Dimension\x1aR\n\tDimension\x12\x13\n\tdim_value\x18\x01 \x01(\x03H\x00\x12\x13\n\tdim_param\x18\x02 \x01(\tH\x00\x12\x12\n\ndenotation\x18\x03 \x01(\tB\x07\n\x05value"\xc2\x04\n\tTypeProto\x12-\n\x0btensor_type\x18\x01 \x01(\x0b\x32\x16.onnx.TypeProto.TensorH\x00\x12\x31\n\rsequence_type\x18\x04 \x01(\x0b\x32\x18.onnx.TypeProto.SequenceH\x00\x12\'\n\x08map_type\x18\x05 \x01(\x0b\x32\x13.onnx.TypeProto.MapH\x00\x12:\n\x12sparse_tensor_type\x18\x08 
\x01(\x0b\x32\x1c.onnx.TypeProto.SparseTensorH\x00\x12-\n\x0bopaque_type\x18\x07 \x01(\x0b\x32\x16.onnx.TypeProto.OpaqueH\x00\x12\x12\n\ndenotation\x18\x06 \x01(\t\x1a\x42\n\x06Tensor\x12\x11\n\telem_type\x18\x01 \x01(\x05\x12%\n\x05shape\x18\x02 \x01(\x0b\x32\x16.onnx.TensorShapeProto\x1a.\n\x08Sequence\x12"\n\telem_type\x18\x01 \x01(\x0b\x32\x0f.onnx.TypeProto\x1a<\n\x03Map\x12\x10\n\x08key_type\x18\x01 \x01(\x05\x12#\n\nvalue_type\x18\x02 \x01(\x0b\x32\x0f.onnx.TypeProto\x1aH\n\x0cSparseTensor\x12\x11\n\telem_type\x18\x01 \x01(\x05\x12%\n\x05shape\x18\x02 \x01(\x0b\x32\x16.onnx.TensorShapeProto\x1a&\n\x06Opaque\x12\x0e\n\x06\x64omain\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\tB\x07\n\x05value"5\n\x12OperatorSetIdProto\x12\x0e\n\x06\x64omain\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\x03"\xbf\x01\n\rFunctionProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x15\n\rsince_version\x18\x02 \x01(\x03\x12$\n\x06status\x18\x03 \x01(\x0e\x32\x14.onnx.OperatorStatus\x12\r\n\x05input\x18\x04 \x03(\t\x12\x0e\n\x06output\x18\x05 \x03(\t\x12\x11\n\tattribute\x18\x06 \x03(\t\x12\x1d\n\x04node\x18\x07 \x03(\x0b\x32\x0f.onnx.NodeProto\x12\x12\n\ndoc_string\x18\x08 \x01(\t*\xb1\x01\n\x07Version\x12\x12\n\x0e_START_VERSION\x10\x00\x12\x19\n\x15IR_VERSION_2017_10_10\x10\x01\x12\x19\n\x15IR_VERSION_2017_10_30\x10\x02\x12\x18\n\x14IR_VERSION_2017_11_3\x10\x03\x12\x18\n\x14IR_VERSION_2019_1_22\x10\x04\x12\x18\n\x14IR_VERSION_2019_3_18\x10\x05\x12\x0e\n\nIR_VERSION\x10\x06*.\n\x0eOperatorStatus\x12\x10\n\x0c\x45XPERIMENTAL\x10\x00\x12\n\n\x06STABLE\x10\x01', +) + +_VERSION = _descriptor.EnumDescriptor( + name="Version", + full_name="onnx.Version", + filename=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + values=[ + _descriptor.EnumValueDescriptor( + name="_START_VERSION", + index=0, + number=0, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, + ), + _descriptor.EnumValueDescriptor( + name="IR_VERSION_2017_10_10", + 
index=1, + number=1, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, + ), + _descriptor.EnumValueDescriptor( + name="IR_VERSION_2017_10_30", + index=2, + number=2, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, + ), + _descriptor.EnumValueDescriptor( + name="IR_VERSION_2017_11_3", + index=3, + number=3, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, + ), + _descriptor.EnumValueDescriptor( + name="IR_VERSION_2019_1_22", + index=4, + number=4, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, + ), + _descriptor.EnumValueDescriptor( + name="IR_VERSION_2019_3_18", + index=5, + number=5, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, + ), + _descriptor.EnumValueDescriptor( + name="IR_VERSION", + index=6, + number=6, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, + ), + ], + containing_type=None, + serialized_options=None, + serialized_start=3494, + serialized_end=3671, +) +_sym_db.RegisterEnumDescriptor(_VERSION) + +Version = enum_type_wrapper.EnumTypeWrapper(_VERSION) +_OPERATORSTATUS = _descriptor.EnumDescriptor( + name="OperatorStatus", + full_name="onnx.OperatorStatus", + filename=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + values=[ + _descriptor.EnumValueDescriptor( + name="EXPERIMENTAL", + index=0, + number=0, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, + ), + _descriptor.EnumValueDescriptor( + name="STABLE", + index=1, + number=1, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, + ), + ], + containing_type=None, + serialized_options=None, + serialized_start=3673, + serialized_end=3719, +) +_sym_db.RegisterEnumDescriptor(_OPERATORSTATUS) + +OperatorStatus = enum_type_wrapper.EnumTypeWrapper(_OPERATORSTATUS) +_START_VERSION 
= 0 +IR_VERSION_2017_10_10 = 1 +IR_VERSION_2017_10_30 = 2 +IR_VERSION_2017_11_3 = 3 +IR_VERSION_2019_1_22 = 4 +IR_VERSION_2019_3_18 = 5 +IR_VERSION = 6 +EXPERIMENTAL = 0 +STABLE = 1 + + +_ATTRIBUTEPROTO_ATTRIBUTETYPE = _descriptor.EnumDescriptor( + name="AttributeType", + full_name="onnx.AttributeProto.AttributeType", + filename=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + values=[ + _descriptor.EnumValueDescriptor( + name="UNDEFINED", + index=0, + number=0, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, + ), + _descriptor.EnumValueDescriptor( + name="FLOAT", + index=1, + number=1, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, + ), + _descriptor.EnumValueDescriptor( + name="INT", + index=2, + number=2, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, + ), + _descriptor.EnumValueDescriptor( + name="STRING", + index=3, + number=3, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, + ), + _descriptor.EnumValueDescriptor( + name="TENSOR", + index=4, + number=4, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, + ), + _descriptor.EnumValueDescriptor( + name="GRAPH", + index=5, + number=5, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, + ), + _descriptor.EnumValueDescriptor( + name="SPARSE_TENSOR", + index=6, + number=11, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, + ), + _descriptor.EnumValueDescriptor( + name="FLOATS", + index=7, + number=6, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, + ), + _descriptor.EnumValueDescriptor( + name="INTS", + index=8, + number=7, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, + ), + _descriptor.EnumValueDescriptor( + name="STRINGS", + index=9, + number=8, + 
serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, + ), + _descriptor.EnumValueDescriptor( + name="TENSORS", + index=10, + number=9, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, + ), + _descriptor.EnumValueDescriptor( + name="GRAPHS", + index=11, + number=10, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, + ), + _descriptor.EnumValueDescriptor( + name="SPARSE_TENSORS", + index=12, + number=12, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, + ), + ], + containing_type=None, + serialized_options=None, + serialized_start=456, + serialized_end=640, +) +_sym_db.RegisterEnumDescriptor(_ATTRIBUTEPROTO_ATTRIBUTETYPE) + +_TENSORPROTO_DATATYPE = _descriptor.EnumDescriptor( + name="DataType", + full_name="onnx.TensorProto.DataType", + filename=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + values=[ + _descriptor.EnumValueDescriptor( + name="UNDEFINED", + index=0, + number=0, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, + ), + _descriptor.EnumValueDescriptor( + name="FLOAT", + index=1, + number=1, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, + ), + _descriptor.EnumValueDescriptor( + name="UINT8", + index=2, + number=2, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, + ), + _descriptor.EnumValueDescriptor( + name="INT8", + index=3, + number=3, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, + ), + _descriptor.EnumValueDescriptor( + name="UINT16", + index=4, + number=4, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, + ), + _descriptor.EnumValueDescriptor( + name="INT16", + index=5, + number=5, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, + ), + 
_descriptor.EnumValueDescriptor( + name="INT32", + index=6, + number=6, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, + ), + _descriptor.EnumValueDescriptor( + name="INT64", + index=7, + number=7, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, + ), + _descriptor.EnumValueDescriptor( + name="STRING", + index=8, + number=8, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, + ), + _descriptor.EnumValueDescriptor( + name="BOOL", + index=9, + number=9, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, + ), + _descriptor.EnumValueDescriptor( + name="FLOAT16", + index=10, + number=10, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, + ), + _descriptor.EnumValueDescriptor( + name="DOUBLE", + index=11, + number=11, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, + ), + _descriptor.EnumValueDescriptor( + name="UINT32", + index=12, + number=12, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, + ), + _descriptor.EnumValueDescriptor( + name="UINT64", + index=13, + number=13, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, + ), + _descriptor.EnumValueDescriptor( + name="COMPLEX64", + index=14, + number=14, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, + ), + _descriptor.EnumValueDescriptor( + name="COMPLEX128", + index=15, + number=15, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, + ), + _descriptor.EnumValueDescriptor( + name="BFLOAT16", + index=16, + number=16, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, + ), + ], + containing_type=None, + serialized_options=None, + serialized_start=2142, + serialized_end=2360, +) 
+_sym_db.RegisterEnumDescriptor(_TENSORPROTO_DATATYPE) + +_TENSORPROTO_DATALOCATION = _descriptor.EnumDescriptor( + name="DataLocation", + full_name="onnx.TensorProto.DataLocation", + filename=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + values=[ + _descriptor.EnumValueDescriptor( + name="DEFAULT", + index=0, + number=0, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, + ), + _descriptor.EnumValueDescriptor( + name="EXTERNAL", + index=1, + number=1, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, + ), + ], + containing_type=None, + serialized_options=None, + serialized_start=2362, + serialized_end=2403, +) +_sym_db.RegisterEnumDescriptor(_TENSORPROTO_DATALOCATION) + + +_ATTRIBUTEPROTO = _descriptor.Descriptor( + name="AttributeProto", + full_name="onnx.AttributeProto", + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name="name", + full_name="onnx.AttributeProto.name", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=b"".decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="ref_attr_name", + full_name="onnx.AttributeProto.ref_attr_name", + index=1, + number=21, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=b"".decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="doc_string", + full_name="onnx.AttributeProto.doc_string", + index=2, + number=13, + type=9, + cpp_type=9, 
+ label=1, + has_default_value=False, + default_value=b"".decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="type", + full_name="onnx.AttributeProto.type", + index=3, + number=20, + type=14, + cpp_type=8, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="f", + full_name="onnx.AttributeProto.f", + index=4, + number=2, + type=2, + cpp_type=6, + label=1, + has_default_value=False, + default_value=float(0), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="i", + full_name="onnx.AttributeProto.i", + index=5, + number=3, + type=3, + cpp_type=2, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="s", + full_name="onnx.AttributeProto.s", + index=6, + number=4, + type=12, + cpp_type=9, + label=1, + has_default_value=False, + default_value=b"", + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="t", + full_name="onnx.AttributeProto.t", + index=7, + number=5, + type=11, + cpp_type=10, + 
label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="g", + full_name="onnx.AttributeProto.g", + index=8, + number=6, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="sparse_tensor", + full_name="onnx.AttributeProto.sparse_tensor", + index=9, + number=22, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="floats", + full_name="onnx.AttributeProto.floats", + index=10, + number=7, + type=2, + cpp_type=6, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="ints", + full_name="onnx.AttributeProto.ints", + index=11, + number=8, + type=3, + cpp_type=2, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="strings", + full_name="onnx.AttributeProto.strings", + index=12, + number=9, 
+ type=12, + cpp_type=9, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="tensors", + full_name="onnx.AttributeProto.tensors", + index=13, + number=10, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="graphs", + full_name="onnx.AttributeProto.graphs", + index=14, + number=11, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="sparse_tensors", + full_name="onnx.AttributeProto.sparse_tensors", + index=15, + number=23, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + ], + extensions=[], + nested_types=[], + enum_types=[ + _ATTRIBUTEPROTO_ATTRIBUTETYPE, + ], + serialized_options=None, + is_extendable=False, + syntax="proto2", + extension_ranges=[], + oneofs=[], + serialized_start=24, + serialized_end=640, +) + + +_VALUEINFOPROTO = _descriptor.Descriptor( + name="ValueInfoProto", + full_name="onnx.ValueInfoProto", + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + 
_descriptor.FieldDescriptor( + name="name", + full_name="onnx.ValueInfoProto.name", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=b"".decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="type", + full_name="onnx.ValueInfoProto.type", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="doc_string", + full_name="onnx.ValueInfoProto.doc_string", + index=2, + number=3, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=b"".decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto2", + extension_ranges=[], + oneofs=[], + serialized_start=642, + serialized_end=723, +) + + +_NODEPROTO = _descriptor.Descriptor( + name="NodeProto", + full_name="onnx.NodeProto", + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name="input", + full_name="onnx.NodeProto.input", + index=0, + number=1, + type=9, + cpp_type=9, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + 
file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="output", + full_name="onnx.NodeProto.output", + index=1, + number=2, + type=9, + cpp_type=9, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="name", + full_name="onnx.NodeProto.name", + index=2, + number=3, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=b"".decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="op_type", + full_name="onnx.NodeProto.op_type", + index=3, + number=4, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=b"".decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="domain", + full_name="onnx.NodeProto.domain", + index=4, + number=7, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=b"".decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="attribute", + full_name="onnx.NodeProto.attribute", + index=5, + number=5, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + 
extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="doc_string", + full_name="onnx.NodeProto.doc_string", + index=6, + number=6, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=b"".decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto2", + extension_ranges=[], + oneofs=[], + serialized_start=726, + serialized_end=876, +) + + +_MODELPROTO = _descriptor.Descriptor( + name="ModelProto", + full_name="onnx.ModelProto", + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name="ir_version", + full_name="onnx.ModelProto.ir_version", + index=0, + number=1, + type=3, + cpp_type=2, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="opset_import", + full_name="onnx.ModelProto.opset_import", + index=1, + number=8, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="producer_name", + full_name="onnx.ModelProto.producer_name", + index=2, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + 
default_value=b"".decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="producer_version", + full_name="onnx.ModelProto.producer_version", + index=3, + number=3, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=b"".decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="domain", + full_name="onnx.ModelProto.domain", + index=4, + number=4, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=b"".decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="model_version", + full_name="onnx.ModelProto.model_version", + index=5, + number=5, + type=3, + cpp_type=2, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="doc_string", + full_name="onnx.ModelProto.doc_string", + index=6, + number=6, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=b"".decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="graph", + 
full_name="onnx.ModelProto.graph", + index=7, + number=7, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="functions", + full_name="onnx.ModelProto.functions", + index=8, + number=100, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="metadata_props", + full_name="onnx.ModelProto.metadata_props", + index=9, + number=14, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto2", + extension_ranges=[], + oneofs=[], + serialized_start=879, + serialized_end=1194, +) + + +_STRINGSTRINGENTRYPROTO = _descriptor.Descriptor( + name="StringStringEntryProto", + full_name="onnx.StringStringEntryProto", + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name="key", + full_name="onnx.StringStringEntryProto.key", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=b"".decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + 
file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="value", + full_name="onnx.StringStringEntryProto.value", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=b"".decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto2", + extension_ranges=[], + oneofs=[], + serialized_start=1196, + serialized_end=1248, +) + + +_TENSORANNOTATION = _descriptor.Descriptor( + name="TensorAnnotation", + full_name="onnx.TensorAnnotation", + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name="tensor_name", + full_name="onnx.TensorAnnotation.tensor_name", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=b"".decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="quant_parameter_tensor_names", + full_name="onnx.TensorAnnotation.quant_parameter_tensor_names", + index=1, + number=2, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto2", + extension_ranges=[], + oneofs=[], + 
serialized_start=1250, + serialized_end=1357, +) + + +_GRAPHPROTO = _descriptor.Descriptor( + name="GraphProto", + full_name="onnx.GraphProto", + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name="node", + full_name="onnx.GraphProto.node", + index=0, + number=1, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="name", + full_name="onnx.GraphProto.name", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=b"".decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="initializer", + full_name="onnx.GraphProto.initializer", + index=2, + number=5, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="sparse_initializer", + full_name="onnx.GraphProto.sparse_initializer", + index=3, + number=15, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="doc_string", + full_name="onnx.GraphProto.doc_string", + 
index=4, + number=10, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=b"".decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="input", + full_name="onnx.GraphProto.input", + index=5, + number=11, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="output", + full_name="onnx.GraphProto.output", + index=6, + number=12, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="value_info", + full_name="onnx.GraphProto.value_info", + index=7, + number=13, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="quantization_annotation", + full_name="onnx.GraphProto.quantization_annotation", + index=8, + number=14, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + ], + extensions=[], + 
nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto2", + extension_ranges=[], + oneofs=[], + serialized_start=1360, + serialized_end=1704, +) + + +_TENSORPROTO_SEGMENT = _descriptor.Descriptor( + name="Segment", + full_name="onnx.TensorProto.Segment", + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name="begin", + full_name="onnx.TensorProto.Segment.begin", + index=0, + number=1, + type=3, + cpp_type=2, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="end", + full_name="onnx.TensorProto.Segment.end", + index=1, + number=2, + type=3, + cpp_type=2, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto2", + extension_ranges=[], + oneofs=[], + serialized_start=2102, + serialized_end=2139, +) + +_TENSORPROTO = _descriptor.Descriptor( + name="TensorProto", + full_name="onnx.TensorProto", + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name="dims", + full_name="onnx.TensorProto.dims", + index=0, + number=1, + type=3, + cpp_type=2, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + 
create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="data_type", + full_name="onnx.TensorProto.data_type", + index=1, + number=2, + type=5, + cpp_type=1, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="segment", + full_name="onnx.TensorProto.segment", + index=2, + number=3, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="float_data", + full_name="onnx.TensorProto.float_data", + index=3, + number=4, + type=2, + cpp_type=6, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=b"\020\001", + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="int32_data", + full_name="onnx.TensorProto.int32_data", + index=4, + number=5, + type=5, + cpp_type=1, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=b"\020\001", + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="string_data", + full_name="onnx.TensorProto.string_data", + index=5, + number=6, + type=12, + cpp_type=9, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, 
+ serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="int64_data", + full_name="onnx.TensorProto.int64_data", + index=6, + number=7, + type=3, + cpp_type=2, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=b"\020\001", + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="name", + full_name="onnx.TensorProto.name", + index=7, + number=8, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=b"".decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="doc_string", + full_name="onnx.TensorProto.doc_string", + index=8, + number=12, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=b"".decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="raw_data", + full_name="onnx.TensorProto.raw_data", + index=9, + number=9, + type=12, + cpp_type=9, + label=1, + has_default_value=False, + default_value=b"", + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="external_data", + full_name="onnx.TensorProto.external_data", + index=10, + number=13, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + 
enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="data_location", + full_name="onnx.TensorProto.data_location", + index=11, + number=14, + type=14, + cpp_type=8, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="double_data", + full_name="onnx.TensorProto.double_data", + index=12, + number=10, + type=1, + cpp_type=5, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=b"\020\001", + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="uint64_data", + full_name="onnx.TensorProto.uint64_data", + index=13, + number=11, + type=4, + cpp_type=4, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=b"\020\001", + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + ], + extensions=[], + nested_types=[ + _TENSORPROTO_SEGMENT, + ], + enum_types=[ + _TENSORPROTO_DATATYPE, + _TENSORPROTO_DATALOCATION, + ], + serialized_options=None, + is_extendable=False, + syntax="proto2", + extension_ranges=[], + oneofs=[], + serialized_start=1707, + serialized_end=2403, +) + + +_SPARSETENSORPROTO = _descriptor.Descriptor( + name="SparseTensorProto", + full_name="onnx.SparseTensorProto", + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + 
name="values", + full_name="onnx.SparseTensorProto.values", + index=0, + number=1, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="indices", + full_name="onnx.SparseTensorProto.indices", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="dims", + full_name="onnx.SparseTensorProto.dims", + index=2, + number=3, + type=3, + cpp_type=2, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto2", + extension_ranges=[], + oneofs=[], + serialized_start=2405, + serialized_end=2509, +) + + +_TENSORSHAPEPROTO_DIMENSION = _descriptor.Descriptor( + name="Dimension", + full_name="onnx.TensorShapeProto.Dimension", + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name="dim_value", + full_name="onnx.TensorShapeProto.Dimension.dim_value", + index=0, + number=1, + type=3, + cpp_type=2, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + 
file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="dim_param", + full_name="onnx.TensorShapeProto.Dimension.dim_param", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=b"".decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="denotation", + full_name="onnx.TensorShapeProto.Dimension.denotation", + index=2, + number=3, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=b"".decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto2", + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name="value", + full_name="onnx.TensorShapeProto.Dimension.value", + index=0, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[], + ), + ], + serialized_start=2579, + serialized_end=2661, +) + +_TENSORSHAPEPROTO = _descriptor.Descriptor( + name="TensorShapeProto", + full_name="onnx.TensorShapeProto", + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name="dim", + full_name="onnx.TensorShapeProto.dim", + index=0, + number=1, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + ], + 
extensions=[], + nested_types=[ + _TENSORSHAPEPROTO_DIMENSION, + ], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto2", + extension_ranges=[], + oneofs=[], + serialized_start=2512, + serialized_end=2661, +) + + +_TYPEPROTO_TENSOR = _descriptor.Descriptor( + name="Tensor", + full_name="onnx.TypeProto.Tensor", + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name="elem_type", + full_name="onnx.TypeProto.Tensor.elem_type", + index=0, + number=1, + type=5, + cpp_type=1, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="shape", + full_name="onnx.TypeProto.Tensor.shape", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto2", + extension_ranges=[], + oneofs=[], + serialized_start=2943, + serialized_end=3009, +) + +_TYPEPROTO_SEQUENCE = _descriptor.Descriptor( + name="Sequence", + full_name="onnx.TypeProto.Sequence", + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name="elem_type", + full_name="onnx.TypeProto.Sequence.elem_type", + index=0, + number=1, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + 
is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto2", + extension_ranges=[], + oneofs=[], + serialized_start=3011, + serialized_end=3057, +) + +_TYPEPROTO_MAP = _descriptor.Descriptor( + name="Map", + full_name="onnx.TypeProto.Map", + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name="key_type", + full_name="onnx.TypeProto.Map.key_type", + index=0, + number=1, + type=5, + cpp_type=1, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="value_type", + full_name="onnx.TypeProto.Map.value_type", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto2", + extension_ranges=[], + oneofs=[], + serialized_start=3059, + serialized_end=3119, +) + +_TYPEPROTO_SPARSETENSOR = _descriptor.Descriptor( + name="SparseTensor", + full_name="onnx.TypeProto.SparseTensor", + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name="elem_type", + full_name="onnx.TypeProto.SparseTensor.elem_type", + index=0, + number=1, + type=5, + cpp_type=1, + label=1, + 
has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="shape", + full_name="onnx.TypeProto.SparseTensor.shape", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto2", + extension_ranges=[], + oneofs=[], + serialized_start=3121, + serialized_end=3193, +) + +_TYPEPROTO_OPAQUE = _descriptor.Descriptor( + name="Opaque", + full_name="onnx.TypeProto.Opaque", + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name="domain", + full_name="onnx.TypeProto.Opaque.domain", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=b"".decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="name", + full_name="onnx.TypeProto.Opaque.name", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=b"".decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + 
serialized_options=None, + is_extendable=False, + syntax="proto2", + extension_ranges=[], + oneofs=[], + serialized_start=3195, + serialized_end=3233, +) + +_TYPEPROTO = _descriptor.Descriptor( + name="TypeProto", + full_name="onnx.TypeProto", + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name="tensor_type", + full_name="onnx.TypeProto.tensor_type", + index=0, + number=1, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="sequence_type", + full_name="onnx.TypeProto.sequence_type", + index=1, + number=4, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="map_type", + full_name="onnx.TypeProto.map_type", + index=2, + number=5, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="sparse_tensor_type", + full_name="onnx.TypeProto.sparse_tensor_type", + index=3, + number=8, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + 
create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="opaque_type", + full_name="onnx.TypeProto.opaque_type", + index=4, + number=7, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="denotation", + full_name="onnx.TypeProto.denotation", + index=5, + number=6, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=b"".decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + ], + extensions=[], + nested_types=[ + _TYPEPROTO_TENSOR, + _TYPEPROTO_SEQUENCE, + _TYPEPROTO_MAP, + _TYPEPROTO_SPARSETENSOR, + _TYPEPROTO_OPAQUE, + ], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto2", + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name="value", + full_name="onnx.TypeProto.value", + index=0, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[], + ), + ], + serialized_start=2664, + serialized_end=3242, +) + + +_OPERATORSETIDPROTO = _descriptor.Descriptor( + name="OperatorSetIdProto", + full_name="onnx.OperatorSetIdProto", + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name="domain", + full_name="onnx.OperatorSetIdProto.domain", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=b"".decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + 
file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="version", + full_name="onnx.OperatorSetIdProto.version", + index=1, + number=2, + type=3, + cpp_type=2, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto2", + extension_ranges=[], + oneofs=[], + serialized_start=3244, + serialized_end=3297, +) + + +_FUNCTIONPROTO = _descriptor.Descriptor( + name="FunctionProto", + full_name="onnx.FunctionProto", + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name="name", + full_name="onnx.FunctionProto.name", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=b"".decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="since_version", + full_name="onnx.FunctionProto.since_version", + index=1, + number=2, + type=3, + cpp_type=2, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="status", + full_name="onnx.FunctionProto.status", + index=2, + number=3, + type=14, + cpp_type=8, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + 
is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="input", + full_name="onnx.FunctionProto.input", + index=3, + number=4, + type=9, + cpp_type=9, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="output", + full_name="onnx.FunctionProto.output", + index=4, + number=5, + type=9, + cpp_type=9, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="attribute", + full_name="onnx.FunctionProto.attribute", + index=5, + number=6, + type=9, + cpp_type=9, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="node", + full_name="onnx.FunctionProto.node", + index=6, + number=7, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="doc_string", + full_name="onnx.FunctionProto.doc_string", + index=7, + number=8, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=b"".decode("utf-8"), + message_type=None, + 
enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto2", + extension_ranges=[], + oneofs=[], + serialized_start=3300, + serialized_end=3491, +) + +_ATTRIBUTEPROTO.fields_by_name["type"].enum_type = _ATTRIBUTEPROTO_ATTRIBUTETYPE +_ATTRIBUTEPROTO.fields_by_name["t"].message_type = _TENSORPROTO +_ATTRIBUTEPROTO.fields_by_name["g"].message_type = _GRAPHPROTO +_ATTRIBUTEPROTO.fields_by_name["sparse_tensor"].message_type = _SPARSETENSORPROTO +_ATTRIBUTEPROTO.fields_by_name["tensors"].message_type = _TENSORPROTO +_ATTRIBUTEPROTO.fields_by_name["graphs"].message_type = _GRAPHPROTO +_ATTRIBUTEPROTO.fields_by_name["sparse_tensors"].message_type = _SPARSETENSORPROTO +_ATTRIBUTEPROTO_ATTRIBUTETYPE.containing_type = _ATTRIBUTEPROTO +_VALUEINFOPROTO.fields_by_name["type"].message_type = _TYPEPROTO +_NODEPROTO.fields_by_name["attribute"].message_type = _ATTRIBUTEPROTO +_MODELPROTO.fields_by_name["opset_import"].message_type = _OPERATORSETIDPROTO +_MODELPROTO.fields_by_name["graph"].message_type = _GRAPHPROTO +_MODELPROTO.fields_by_name["functions"].message_type = _FUNCTIONPROTO +_MODELPROTO.fields_by_name["metadata_props"].message_type = _STRINGSTRINGENTRYPROTO +_TENSORANNOTATION.fields_by_name["quant_parameter_tensor_names"].message_type = _STRINGSTRINGENTRYPROTO +_GRAPHPROTO.fields_by_name["node"].message_type = _NODEPROTO +_GRAPHPROTO.fields_by_name["initializer"].message_type = _TENSORPROTO +_GRAPHPROTO.fields_by_name["sparse_initializer"].message_type = _SPARSETENSORPROTO +_GRAPHPROTO.fields_by_name["input"].message_type = _VALUEINFOPROTO +_GRAPHPROTO.fields_by_name["output"].message_type = _VALUEINFOPROTO +_GRAPHPROTO.fields_by_name["value_info"].message_type = _VALUEINFOPROTO 
+_GRAPHPROTO.fields_by_name["quantization_annotation"].message_type = _TENSORANNOTATION +_TENSORPROTO_SEGMENT.containing_type = _TENSORPROTO +_TENSORPROTO.fields_by_name["segment"].message_type = _TENSORPROTO_SEGMENT +_TENSORPROTO.fields_by_name["external_data"].message_type = _STRINGSTRINGENTRYPROTO +_TENSORPROTO.fields_by_name["data_location"].enum_type = _TENSORPROTO_DATALOCATION +_TENSORPROTO_DATATYPE.containing_type = _TENSORPROTO +_TENSORPROTO_DATALOCATION.containing_type = _TENSORPROTO +_SPARSETENSORPROTO.fields_by_name["values"].message_type = _TENSORPROTO +_SPARSETENSORPROTO.fields_by_name["indices"].message_type = _TENSORPROTO +_TENSORSHAPEPROTO_DIMENSION.containing_type = _TENSORSHAPEPROTO +_TENSORSHAPEPROTO_DIMENSION.oneofs_by_name["value"].fields.append( + _TENSORSHAPEPROTO_DIMENSION.fields_by_name["dim_value"] +) +_TENSORSHAPEPROTO_DIMENSION.fields_by_name["dim_value"].containing_oneof = _TENSORSHAPEPROTO_DIMENSION.oneofs_by_name[ + "value" +] +_TENSORSHAPEPROTO_DIMENSION.oneofs_by_name["value"].fields.append( + _TENSORSHAPEPROTO_DIMENSION.fields_by_name["dim_param"] +) +_TENSORSHAPEPROTO_DIMENSION.fields_by_name["dim_param"].containing_oneof = _TENSORSHAPEPROTO_DIMENSION.oneofs_by_name[ + "value" +] +_TENSORSHAPEPROTO.fields_by_name["dim"].message_type = _TENSORSHAPEPROTO_DIMENSION +_TYPEPROTO_TENSOR.fields_by_name["shape"].message_type = _TENSORSHAPEPROTO +_TYPEPROTO_TENSOR.containing_type = _TYPEPROTO +_TYPEPROTO_SEQUENCE.fields_by_name["elem_type"].message_type = _TYPEPROTO +_TYPEPROTO_SEQUENCE.containing_type = _TYPEPROTO +_TYPEPROTO_MAP.fields_by_name["value_type"].message_type = _TYPEPROTO +_TYPEPROTO_MAP.containing_type = _TYPEPROTO +_TYPEPROTO_SPARSETENSOR.fields_by_name["shape"].message_type = _TENSORSHAPEPROTO +_TYPEPROTO_SPARSETENSOR.containing_type = _TYPEPROTO +_TYPEPROTO_OPAQUE.containing_type = _TYPEPROTO +_TYPEPROTO.fields_by_name["tensor_type"].message_type = _TYPEPROTO_TENSOR +_TYPEPROTO.fields_by_name["sequence_type"].message_type 
= _TYPEPROTO_SEQUENCE +_TYPEPROTO.fields_by_name["map_type"].message_type = _TYPEPROTO_MAP +_TYPEPROTO.fields_by_name["sparse_tensor_type"].message_type = _TYPEPROTO_SPARSETENSOR +_TYPEPROTO.fields_by_name["opaque_type"].message_type = _TYPEPROTO_OPAQUE +_TYPEPROTO.oneofs_by_name["value"].fields.append(_TYPEPROTO.fields_by_name["tensor_type"]) +_TYPEPROTO.fields_by_name["tensor_type"].containing_oneof = _TYPEPROTO.oneofs_by_name["value"] +_TYPEPROTO.oneofs_by_name["value"].fields.append(_TYPEPROTO.fields_by_name["sequence_type"]) +_TYPEPROTO.fields_by_name["sequence_type"].containing_oneof = _TYPEPROTO.oneofs_by_name["value"] +_TYPEPROTO.oneofs_by_name["value"].fields.append(_TYPEPROTO.fields_by_name["map_type"]) +_TYPEPROTO.fields_by_name["map_type"].containing_oneof = _TYPEPROTO.oneofs_by_name["value"] +_TYPEPROTO.oneofs_by_name["value"].fields.append(_TYPEPROTO.fields_by_name["sparse_tensor_type"]) +_TYPEPROTO.fields_by_name["sparse_tensor_type"].containing_oneof = _TYPEPROTO.oneofs_by_name["value"] +_TYPEPROTO.oneofs_by_name["value"].fields.append(_TYPEPROTO.fields_by_name["opaque_type"]) +_TYPEPROTO.fields_by_name["opaque_type"].containing_oneof = _TYPEPROTO.oneofs_by_name["value"] +_FUNCTIONPROTO.fields_by_name["status"].enum_type = _OPERATORSTATUS +_FUNCTIONPROTO.fields_by_name["node"].message_type = _NODEPROTO +DESCRIPTOR.message_types_by_name["AttributeProto"] = _ATTRIBUTEPROTO +DESCRIPTOR.message_types_by_name["ValueInfoProto"] = _VALUEINFOPROTO +DESCRIPTOR.message_types_by_name["NodeProto"] = _NODEPROTO +DESCRIPTOR.message_types_by_name["ModelProto"] = _MODELPROTO +DESCRIPTOR.message_types_by_name["StringStringEntryProto"] = _STRINGSTRINGENTRYPROTO +DESCRIPTOR.message_types_by_name["TensorAnnotation"] = _TENSORANNOTATION +DESCRIPTOR.message_types_by_name["GraphProto"] = _GRAPHPROTO +DESCRIPTOR.message_types_by_name["TensorProto"] = _TENSORPROTO +DESCRIPTOR.message_types_by_name["SparseTensorProto"] = _SPARSETENSORPROTO 
+DESCRIPTOR.message_types_by_name["TensorShapeProto"] = _TENSORSHAPEPROTO +DESCRIPTOR.message_types_by_name["TypeProto"] = _TYPEPROTO +DESCRIPTOR.message_types_by_name["OperatorSetIdProto"] = _OPERATORSETIDPROTO +DESCRIPTOR.message_types_by_name["FunctionProto"] = _FUNCTIONPROTO +DESCRIPTOR.enum_types_by_name["Version"] = _VERSION +DESCRIPTOR.enum_types_by_name["OperatorStatus"] = _OPERATORSTATUS +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + +AttributeProto = _reflection.GeneratedProtocolMessageType( + "AttributeProto", + (_message.Message,), + { + "DESCRIPTOR": _ATTRIBUTEPROTO, + "__module__": "onnx_ml_pb2" + # @@protoc_insertion_point(class_scope:onnx.AttributeProto) + }, +) +_sym_db.RegisterMessage(AttributeProto) + +ValueInfoProto = _reflection.GeneratedProtocolMessageType( + "ValueInfoProto", + (_message.Message,), + { + "DESCRIPTOR": _VALUEINFOPROTO, + "__module__": "onnx_ml_pb2" + # @@protoc_insertion_point(class_scope:onnx.ValueInfoProto) + }, +) +_sym_db.RegisterMessage(ValueInfoProto) + +NodeProto = _reflection.GeneratedProtocolMessageType( + "NodeProto", + (_message.Message,), + { + "DESCRIPTOR": _NODEPROTO, + "__module__": "onnx_ml_pb2" + # @@protoc_insertion_point(class_scope:onnx.NodeProto) + }, +) +_sym_db.RegisterMessage(NodeProto) + +ModelProto = _reflection.GeneratedProtocolMessageType( + "ModelProto", + (_message.Message,), + { + "DESCRIPTOR": _MODELPROTO, + "__module__": "onnx_ml_pb2" + # @@protoc_insertion_point(class_scope:onnx.ModelProto) + }, +) +_sym_db.RegisterMessage(ModelProto) + +StringStringEntryProto = _reflection.GeneratedProtocolMessageType( + "StringStringEntryProto", + (_message.Message,), + { + "DESCRIPTOR": _STRINGSTRINGENTRYPROTO, + "__module__": "onnx_ml_pb2" + # @@protoc_insertion_point(class_scope:onnx.StringStringEntryProto) + }, +) +_sym_db.RegisterMessage(StringStringEntryProto) + +TensorAnnotation = _reflection.GeneratedProtocolMessageType( + "TensorAnnotation", + (_message.Message,), + { + "DESCRIPTOR": 
_TENSORANNOTATION, + "__module__": "onnx_ml_pb2" + # @@protoc_insertion_point(class_scope:onnx.TensorAnnotation) + }, +) +_sym_db.RegisterMessage(TensorAnnotation) + +GraphProto = _reflection.GeneratedProtocolMessageType( + "GraphProto", + (_message.Message,), + { + "DESCRIPTOR": _GRAPHPROTO, + "__module__": "onnx_ml_pb2" + # @@protoc_insertion_point(class_scope:onnx.GraphProto) + }, +) +_sym_db.RegisterMessage(GraphProto) + +TensorProto = _reflection.GeneratedProtocolMessageType( + "TensorProto", + (_message.Message,), + { + "Segment": _reflection.GeneratedProtocolMessageType( + "Segment", + (_message.Message,), + { + "DESCRIPTOR": _TENSORPROTO_SEGMENT, + "__module__": "onnx_ml_pb2" + # @@protoc_insertion_point(class_scope:onnx.TensorProto.Segment) + }, + ), + "DESCRIPTOR": _TENSORPROTO, + "__module__": "onnx_ml_pb2" + # @@protoc_insertion_point(class_scope:onnx.TensorProto) + }, +) +_sym_db.RegisterMessage(TensorProto) +_sym_db.RegisterMessage(TensorProto.Segment) + +SparseTensorProto = _reflection.GeneratedProtocolMessageType( + "SparseTensorProto", + (_message.Message,), + { + "DESCRIPTOR": _SPARSETENSORPROTO, + "__module__": "onnx_ml_pb2" + # @@protoc_insertion_point(class_scope:onnx.SparseTensorProto) + }, +) +_sym_db.RegisterMessage(SparseTensorProto) + +TensorShapeProto = _reflection.GeneratedProtocolMessageType( + "TensorShapeProto", + (_message.Message,), + { + "Dimension": _reflection.GeneratedProtocolMessageType( + "Dimension", + (_message.Message,), + { + "DESCRIPTOR": _TENSORSHAPEPROTO_DIMENSION, + "__module__": "onnx_ml_pb2" + # @@protoc_insertion_point(class_scope:onnx.TensorShapeProto.Dimension) + }, + ), + "DESCRIPTOR": _TENSORSHAPEPROTO, + "__module__": "onnx_ml_pb2" + # @@protoc_insertion_point(class_scope:onnx.TensorShapeProto) + }, +) +_sym_db.RegisterMessage(TensorShapeProto) +_sym_db.RegisterMessage(TensorShapeProto.Dimension) + +TypeProto = _reflection.GeneratedProtocolMessageType( + "TypeProto", + (_message.Message,), + { + "Tensor": 
_reflection.GeneratedProtocolMessageType( + "Tensor", + (_message.Message,), + { + "DESCRIPTOR": _TYPEPROTO_TENSOR, + "__module__": "onnx_ml_pb2" + # @@protoc_insertion_point(class_scope:onnx.TypeProto.Tensor) + }, + ), + "Sequence": _reflection.GeneratedProtocolMessageType( + "Sequence", + (_message.Message,), + { + "DESCRIPTOR": _TYPEPROTO_SEQUENCE, + "__module__": "onnx_ml_pb2" + # @@protoc_insertion_point(class_scope:onnx.TypeProto.Sequence) + }, + ), + "Map": _reflection.GeneratedProtocolMessageType( + "Map", + (_message.Message,), + { + "DESCRIPTOR": _TYPEPROTO_MAP, + "__module__": "onnx_ml_pb2" + # @@protoc_insertion_point(class_scope:onnx.TypeProto.Map) + }, + ), + "SparseTensor": _reflection.GeneratedProtocolMessageType( + "SparseTensor", + (_message.Message,), + { + "DESCRIPTOR": _TYPEPROTO_SPARSETENSOR, + "__module__": "onnx_ml_pb2" + # @@protoc_insertion_point(class_scope:onnx.TypeProto.SparseTensor) + }, + ), + "Opaque": _reflection.GeneratedProtocolMessageType( + "Opaque", + (_message.Message,), + { + "DESCRIPTOR": _TYPEPROTO_OPAQUE, + "__module__": "onnx_ml_pb2" + # @@protoc_insertion_point(class_scope:onnx.TypeProto.Opaque) + }, + ), + "DESCRIPTOR": _TYPEPROTO, + "__module__": "onnx_ml_pb2" + # @@protoc_insertion_point(class_scope:onnx.TypeProto) + }, +) +_sym_db.RegisterMessage(TypeProto) +_sym_db.RegisterMessage(TypeProto.Tensor) +_sym_db.RegisterMessage(TypeProto.Sequence) +_sym_db.RegisterMessage(TypeProto.Map) +_sym_db.RegisterMessage(TypeProto.SparseTensor) +_sym_db.RegisterMessage(TypeProto.Opaque) + +OperatorSetIdProto = _reflection.GeneratedProtocolMessageType( + "OperatorSetIdProto", + (_message.Message,), + { + "DESCRIPTOR": _OPERATORSETIDPROTO, + "__module__": "onnx_ml_pb2" + # @@protoc_insertion_point(class_scope:onnx.OperatorSetIdProto) + }, +) +_sym_db.RegisterMessage(OperatorSetIdProto) + +FunctionProto = _reflection.GeneratedProtocolMessageType( + "FunctionProto", + (_message.Message,), + { + "DESCRIPTOR": _FUNCTIONPROTO, + 
"__module__": "onnx_ml_pb2" + # @@protoc_insertion_point(class_scope:onnx.FunctionProto) + }, +) +_sym_db.RegisterMessage(FunctionProto) + + +_TENSORPROTO.fields_by_name["float_data"]._options = None +_TENSORPROTO.fields_by_name["int32_data"]._options = None +_TENSORPROTO.fields_by_name["int64_data"]._options = None +_TENSORPROTO.fields_by_name["double_data"]._options = None +_TENSORPROTO.fields_by_name["uint64_data"]._options = None +# @@protoc_insertion_point(module_scope) diff --git a/MLOps/serving_patterns/data_cache_pattern/src/proto/onnx_ml_pb2_grpc.py b/MLOps/serving_patterns/data_cache_pattern/src/proto/onnx_ml_pb2_grpc.py new file mode 100644 index 0000000..8a93939 --- /dev/null +++ b/MLOps/serving_patterns/data_cache_pattern/src/proto/onnx_ml_pb2_grpc.py @@ -0,0 +1,3 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" +import grpc diff --git a/MLOps/serving_patterns/data_cache_pattern/src/proto/predict.proto b/MLOps/serving_patterns/data_cache_pattern/src/proto/predict.proto new file mode 100644 index 0000000..e71d4c9 --- /dev/null +++ b/MLOps/serving_patterns/data_cache_pattern/src/proto/predict.proto @@ -0,0 +1,27 @@ +syntax = "proto3"; + +import "onnx-ml.proto"; + +package onnxruntime.server; + +// PredictRequest specifies how inputs are mapped to tensors +// and how outputs are filtered before returning to user. +message PredictRequest { + reserved 1; + + // Input Tensors. + // This is a mapping between output name and tensor. + map inputs = 2; + + // Output Filters. + // This field is to specify which output fields need to be returned. + // If the list is empty, all outputs will be included. + repeated string output_filter = 3; +} + +// Response for PredictRequest on successful run. +message PredictResponse { + // Output Tensors. + // This is a mapping between output name and tensor. 
+ map outputs = 1; +} \ No newline at end of file diff --git a/MLOps/serving_patterns/data_cache_pattern/src/proto/predict_pb2.py b/MLOps/serving_patterns/data_cache_pattern/src/proto/predict_pb2.py new file mode 100644 index 0000000..bafb9c9 --- /dev/null +++ b/MLOps/serving_patterns/data_cache_pattern/src/proto/predict_pb2.py @@ -0,0 +1,307 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: predict.proto +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database + +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +import src.proto.onnx_ml_pb2 as onnx__ml__pb2 + +DESCRIPTOR = _descriptor.FileDescriptor( + name="predict.proto", + package="onnxruntime.server", + syntax="proto3", + serialized_options=None, + create_key=_descriptor._internal_create_key, + serialized_pb=b'\n\rpredict.proto\x12\x12onnxruntime.server\x1a\ronnx-ml.proto"\xaf\x01\n\x0ePredictRequest\x12>\n\x06inputs\x18\x02 \x03(\x0b\x32..onnxruntime.server.PredictRequest.InputsEntry\x12\x15\n\routput_filter\x18\x03 \x03(\t\x1a@\n\x0bInputsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12 \n\x05value\x18\x02 \x01(\x0b\x32\x11.onnx.TensorProto:\x02\x38\x01J\x04\x08\x01\x10\x02"\x97\x01\n\x0fPredictResponse\x12\x41\n\x07outputs\x18\x01 \x03(\x0b\x32\x30.onnxruntime.server.PredictResponse.OutputsEntry\x1a\x41\n\x0cOutputsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12 \n\x05value\x18\x02 \x01(\x0b\x32\x11.onnx.TensorProto:\x02\x38\x01\x62\x06proto3', + dependencies=[ + onnx__ml__pb2.DESCRIPTOR, + ], +) + + +_PREDICTREQUEST_INPUTSENTRY = _descriptor.Descriptor( + name="InputsEntry", + full_name="onnxruntime.server.PredictRequest.InputsEntry", + filename=None, + file=DESCRIPTOR, + containing_type=None, + 
create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name="key", + full_name="onnxruntime.server.PredictRequest.InputsEntry.key", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=b"".decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="value", + full_name="onnxruntime.server.PredictRequest.InputsEntry.value", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=b"8\001", + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=158, + serialized_end=222, +) + +_PREDICTREQUEST = _descriptor.Descriptor( + name="PredictRequest", + full_name="onnxruntime.server.PredictRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name="inputs", + full_name="onnxruntime.server.PredictRequest.inputs", + index=0, + number=2, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="output_filter", + full_name="onnxruntime.server.PredictRequest.output_filter", + index=1, + number=3, + type=9, + cpp_type=9, + label=3, + has_default_value=False, 
+ default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + ], + extensions=[], + nested_types=[ + _PREDICTREQUEST_INPUTSENTRY, + ], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=53, + serialized_end=228, +) + + +_PREDICTRESPONSE_OUTPUTSENTRY = _descriptor.Descriptor( + name="OutputsEntry", + full_name="onnxruntime.server.PredictResponse.OutputsEntry", + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name="key", + full_name="onnxruntime.server.PredictResponse.OutputsEntry.key", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=b"".decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="value", + full_name="onnxruntime.server.PredictResponse.OutputsEntry.value", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=b"8\001", + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=317, + serialized_end=382, +) + +_PREDICTRESPONSE = _descriptor.Descriptor( + name="PredictResponse", + full_name="onnxruntime.server.PredictResponse", + filename=None, + file=DESCRIPTOR, + 
containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name="outputs", + full_name="onnxruntime.server.PredictResponse.outputs", + index=0, + number=1, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + ], + extensions=[], + nested_types=[ + _PREDICTRESPONSE_OUTPUTSENTRY, + ], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=231, + serialized_end=382, +) + +_PREDICTREQUEST_INPUTSENTRY.fields_by_name["value"].message_type = onnx__ml__pb2._TENSORPROTO +_PREDICTREQUEST_INPUTSENTRY.containing_type = _PREDICTREQUEST +_PREDICTREQUEST.fields_by_name["inputs"].message_type = _PREDICTREQUEST_INPUTSENTRY +_PREDICTRESPONSE_OUTPUTSENTRY.fields_by_name["value"].message_type = onnx__ml__pb2._TENSORPROTO +_PREDICTRESPONSE_OUTPUTSENTRY.containing_type = _PREDICTRESPONSE +_PREDICTRESPONSE.fields_by_name["outputs"].message_type = _PREDICTRESPONSE_OUTPUTSENTRY +DESCRIPTOR.message_types_by_name["PredictRequest"] = _PREDICTREQUEST +DESCRIPTOR.message_types_by_name["PredictResponse"] = _PREDICTRESPONSE +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + +PredictRequest = _reflection.GeneratedProtocolMessageType( + "PredictRequest", + (_message.Message,), + { + "InputsEntry": _reflection.GeneratedProtocolMessageType( + "InputsEntry", + (_message.Message,), + { + "DESCRIPTOR": _PREDICTREQUEST_INPUTSENTRY, + "__module__": "predict_pb2" + # @@protoc_insertion_point(class_scope:onnxruntime.server.PredictRequest.InputsEntry) + }, + ), + "DESCRIPTOR": _PREDICTREQUEST, + "__module__": "predict_pb2" + # @@protoc_insertion_point(class_scope:onnxruntime.server.PredictRequest) + }, +) 
+_sym_db.RegisterMessage(PredictRequest) +_sym_db.RegisterMessage(PredictRequest.InputsEntry) + +PredictResponse = _reflection.GeneratedProtocolMessageType( + "PredictResponse", + (_message.Message,), + { + "OutputsEntry": _reflection.GeneratedProtocolMessageType( + "OutputsEntry", + (_message.Message,), + { + "DESCRIPTOR": _PREDICTRESPONSE_OUTPUTSENTRY, + "__module__": "predict_pb2" + # @@protoc_insertion_point(class_scope:onnxruntime.server.PredictResponse.OutputsEntry) + }, + ), + "DESCRIPTOR": _PREDICTRESPONSE, + "__module__": "predict_pb2" + # @@protoc_insertion_point(class_scope:onnxruntime.server.PredictResponse) + }, +) +_sym_db.RegisterMessage(PredictResponse) +_sym_db.RegisterMessage(PredictResponse.OutputsEntry) + + +_PREDICTREQUEST_INPUTSENTRY._options = None +_PREDICTRESPONSE_OUTPUTSENTRY._options = None +# @@protoc_insertion_point(module_scope) diff --git a/MLOps/serving_patterns/data_cache_pattern/src/proto/predict_pb2_grpc.py b/MLOps/serving_patterns/data_cache_pattern/src/proto/predict_pb2_grpc.py new file mode 100644 index 0000000..8a93939 --- /dev/null +++ b/MLOps/serving_patterns/data_cache_pattern/src/proto/predict_pb2_grpc.py @@ -0,0 +1,3 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 
+"""Client and server classes corresponding to protobuf-defined services.""" +import grpc diff --git a/MLOps/serving_patterns/data_cache_pattern/src/proto/prediction_service.proto b/MLOps/serving_patterns/data_cache_pattern/src/proto/prediction_service.proto new file mode 100644 index 0000000..268010a --- /dev/null +++ b/MLOps/serving_patterns/data_cache_pattern/src/proto/prediction_service.proto @@ -0,0 +1,8 @@ +syntax = "proto3"; +import "predict.proto"; + +package onnxruntime.server; + +service PredictionService { + rpc Predict(PredictRequest) returns (PredictResponse); +} \ No newline at end of file diff --git a/MLOps/serving_patterns/data_cache_pattern/src/proto/prediction_service_pb2.py b/MLOps/serving_patterns/data_cache_pattern/src/proto/prediction_service_pb2.py new file mode 100644 index 0000000..f5f807e --- /dev/null +++ b/MLOps/serving_patterns/data_cache_pattern/src/proto/prediction_service_pb2.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: prediction_service.proto +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database + +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +import src.proto.predict_pb2 as predict__pb2 + +DESCRIPTOR = _descriptor.FileDescriptor( + name="prediction_service.proto", + package="onnxruntime.server", + syntax="proto3", + serialized_options=None, + create_key=_descriptor._internal_create_key, + serialized_pb=b'\n\x18prediction_service.proto\x12\x12onnxruntime.server\x1a\rpredict.proto2g\n\x11PredictionService\x12R\n\x07Predict\x12".onnxruntime.server.PredictRequest\x1a#.onnxruntime.server.PredictResponseb\x06proto3', + dependencies=[ + predict__pb2.DESCRIPTOR, + ], +) + + +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + + +_PREDICTIONSERVICE = _descriptor.ServiceDescriptor( + name="PredictionService", + full_name="onnxruntime.server.PredictionService", + file=DESCRIPTOR, + index=0, + serialized_options=None, + create_key=_descriptor._internal_create_key, + serialized_start=63, + serialized_end=166, + methods=[ + _descriptor.MethodDescriptor( + name="Predict", + full_name="onnxruntime.server.PredictionService.Predict", + index=0, + containing_service=None, + input_type=predict__pb2._PREDICTREQUEST, + output_type=predict__pb2._PREDICTRESPONSE, + serialized_options=None, + create_key=_descriptor._internal_create_key, + ), + ], +) +_sym_db.RegisterServiceDescriptor(_PREDICTIONSERVICE) + +DESCRIPTOR.services_by_name["PredictionService"] = _PREDICTIONSERVICE + +# @@protoc_insertion_point(module_scope) diff --git a/MLOps/serving_patterns/data_cache_pattern/src/proto/prediction_service_pb2_grpc.py b/MLOps/serving_patterns/data_cache_pattern/src/proto/prediction_service_pb2_grpc.py new file mode 100644 index 0000000..79090fb --- /dev/null +++ 
b/MLOps/serving_patterns/data_cache_pattern/src/proto/prediction_service_pb2_grpc.py @@ -0,0 +1,76 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" +import grpc +import src.proto.predict_pb2 as predict__pb2 + + +class PredictionServiceStub(object): + """Missing associated documentation comment in .proto file.""" + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. + """ + self.Predict = channel.unary_unary( + "/onnxruntime.server.PredictionService/Predict", + request_serializer=predict__pb2.PredictRequest.SerializeToString, + response_deserializer=predict__pb2.PredictResponse.FromString, + ) + + +class PredictionServiceServicer(object): + """Missing associated documentation comment in .proto file.""" + + def Predict(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + +def add_PredictionServiceServicer_to_server(servicer, server): + rpc_method_handlers = { + "Predict": grpc.unary_unary_rpc_method_handler( + servicer.Predict, + request_deserializer=predict__pb2.PredictRequest.FromString, + response_serializer=predict__pb2.PredictResponse.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler("onnxruntime.server.PredictionService", rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) + + +# This class is part of an EXPERIMENTAL API. 
+class PredictionService(object): + """Missing associated documentation comment in .proto file.""" + + @staticmethod + def Predict( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): + return grpc.experimental.unary_unary( + request, + target, + "/onnxruntime.server.PredictionService/Predict", + predict__pb2.PredictRequest.SerializeToString, + predict__pb2.PredictResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + ) diff --git a/MLOps/serving_patterns/data_cache_pattern/src/utils/__init__.py b/MLOps/serving_patterns/data_cache_pattern/src/utils/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/MLOps/serving_patterns/data_cache_pattern/src/utils/logging.conf b/MLOps/serving_patterns/data_cache_pattern/src/utils/logging.conf new file mode 100644 index 0000000..490b0c1 --- /dev/null +++ b/MLOps/serving_patterns/data_cache_pattern/src/utils/logging.conf @@ -0,0 +1,78 @@ +[loggers] +keys=root, gunicorn.error, gunicorn.access, uvicorn.error, uvicorn.access + +[logger_root] +level=DEBUG +handlers=console + +[logger_gunicorn.error] +level=DEBUG +handlers=gunicorn_error_file, console +propagate=1 +qualname=gunicorn.error + +[logger_gunicorn.access] +level=INFO +handlers=gunicorn_access_file, console +propagate=1 +qualname=gunicorn.access + +[logger_uvicorn.error] +level=DEBUG +handlers=uvicorn_error_file, console +propagate=1 +qualname=uvicorn.error + +[logger_uvicorn.access] +level=INFO +handlers=uvicorn_access_file, console +propagate=1 +qualname=gunicorn.access + + +[handlers] +keys=console, gunicorn_error_file, gunicorn_access_file, uvicorn_error_file, uvicorn_access_file + +[handler_console] +class=StreamHandler +formatter=generic +args=(sys.stdout, ) + +[handler_gunicorn_error_file] +class=logging.FileHandler 
+formatter=generic +args=('/var/log/gunicorn_error.log', 'a') + +[handler_gunicorn_access_file] +class=logging.FileHandler +formatter=gunicorn_access +args=('/var/log/gunicorn_access.log', 'a') + +[handler_uvicorn_error_file] +class=logging.FileHandler +formatter=generic +args=('/var/log/uvicorn_error.log', 'a') + +[handler_uvicorn_access_file] +class=logging.FileHandler +formatter=uvicorn_access +args=('/var/log/uvicorn_access.log', 'a') + + +[formatters] +keys=generic, gunicorn_access, uvicorn_access + +[formatter_generic] +class=logging.Formatter +format=[%(asctime)s] [%(levelname)s] [%(process)d] [%(name)s] [%(funcName)s] [%(lineno)d] %(message)s +datefmt=%Y-%m-%d %H:%M:%S + +[formatter_gunicorn_access] +class=logging.Formatter +format=[%(asctime)s] %(h)s %(l)s %(u)s %(t)s %(r)s %(m)s %(U)s %(q)s %(H)s %(s)s %(b)s %(f)s %(a)s %(D)s %(p)s +datefmt=%d/%b/%Y:%H:%M:%S (%Z) + +[formatter_uvicorn_access] +class=logging.Formatter +format=[%(asctime)s] [%(levelname)s] [%(process)d] [%(name)s] [%(funcName)s] [%(lineno)d] %(message)s +datefmt=%d/%b/%Y:%H:%M:%S (%Z) \ No newline at end of file diff --git a/MLOps/serving_patterns/data_cache_pattern/src/utils/profiler.py b/MLOps/serving_patterns/data_cache_pattern/src/utils/profiler.py new file mode 100644 index 0000000..8f7b1b9 --- /dev/null +++ b/MLOps/serving_patterns/data_cache_pattern/src/utils/profiler.py @@ -0,0 +1,23 @@ +import cProfile +import os +from logging import getLogger + +logger = getLogger(__name__) + + +def do_cprofile(func): + def profiled_func(*args, **kwargs): + enable_profile = int(os.getenv("PROFILE", 1)) + if enable_profile: + profile = cProfile.Profile() + try: + profile.enable() + result = func(*args, **kwargs) + profile.disable() + return result + finally: + profile.print_stats() + else: + return func(*args, **kwargs) + + return profiled_func diff --git a/MLOps/serving_patterns/edge_ai_pattern/.gradle/6.5/executionHistory/executionHistory.bin 
b/MLOps/serving_patterns/edge_ai_pattern/.gradle/6.5/executionHistory/executionHistory.bin new file mode 100644 index 0000000..78428f1 Binary files /dev/null and b/MLOps/serving_patterns/edge_ai_pattern/.gradle/6.5/executionHistory/executionHistory.bin differ diff --git a/MLOps/serving_patterns/edge_ai_pattern/.gradle/6.5/executionHistory/executionHistory.lock b/MLOps/serving_patterns/edge_ai_pattern/.gradle/6.5/executionHistory/executionHistory.lock new file mode 100644 index 0000000..a851c95 Binary files /dev/null and b/MLOps/serving_patterns/edge_ai_pattern/.gradle/6.5/executionHistory/executionHistory.lock differ diff --git a/MLOps/serving_patterns/edge_ai_pattern/.gradle/6.5/fileChanges/last-build.bin b/MLOps/serving_patterns/edge_ai_pattern/.gradle/6.5/fileChanges/last-build.bin new file mode 100644 index 0000000..f76dd23 Binary files /dev/null and b/MLOps/serving_patterns/edge_ai_pattern/.gradle/6.5/fileChanges/last-build.bin differ diff --git a/MLOps/serving_patterns/edge_ai_pattern/.gradle/6.5/fileContent/fileContent.lock b/MLOps/serving_patterns/edge_ai_pattern/.gradle/6.5/fileContent/fileContent.lock new file mode 100644 index 0000000..4f05887 Binary files /dev/null and b/MLOps/serving_patterns/edge_ai_pattern/.gradle/6.5/fileContent/fileContent.lock differ diff --git a/MLOps/serving_patterns/edge_ai_pattern/.gradle/6.5/fileHashes/fileHashes.bin b/MLOps/serving_patterns/edge_ai_pattern/.gradle/6.5/fileHashes/fileHashes.bin new file mode 100644 index 0000000..7d830af Binary files /dev/null and b/MLOps/serving_patterns/edge_ai_pattern/.gradle/6.5/fileHashes/fileHashes.bin differ diff --git a/MLOps/serving_patterns/edge_ai_pattern/.gradle/6.5/fileHashes/fileHashes.lock b/MLOps/serving_patterns/edge_ai_pattern/.gradle/6.5/fileHashes/fileHashes.lock new file mode 100644 index 0000000..fb6f866 Binary files /dev/null and b/MLOps/serving_patterns/edge_ai_pattern/.gradle/6.5/fileHashes/fileHashes.lock differ diff --git 
a/MLOps/serving_patterns/edge_ai_pattern/.gradle/6.5/fileHashes/resourceHashesCache.bin b/MLOps/serving_patterns/edge_ai_pattern/.gradle/6.5/fileHashes/resourceHashesCache.bin new file mode 100644 index 0000000..2466f1c Binary files /dev/null and b/MLOps/serving_patterns/edge_ai_pattern/.gradle/6.5/fileHashes/resourceHashesCache.bin differ diff --git a/MLOps/serving_patterns/edge_ai_pattern/.gradle/6.5/gc.properties b/MLOps/serving_patterns/edge_ai_pattern/.gradle/6.5/gc.properties new file mode 100644 index 0000000..e69de29 diff --git a/MLOps/serving_patterns/edge_ai_pattern/.gradle/6.5/javaCompile/classAnalysis.bin b/MLOps/serving_patterns/edge_ai_pattern/.gradle/6.5/javaCompile/classAnalysis.bin new file mode 100644 index 0000000..961b509 Binary files /dev/null and b/MLOps/serving_patterns/edge_ai_pattern/.gradle/6.5/javaCompile/classAnalysis.bin differ diff --git a/MLOps/serving_patterns/edge_ai_pattern/.gradle/6.5/javaCompile/jarAnalysis.bin b/MLOps/serving_patterns/edge_ai_pattern/.gradle/6.5/javaCompile/jarAnalysis.bin new file mode 100644 index 0000000..d5a5237 Binary files /dev/null and b/MLOps/serving_patterns/edge_ai_pattern/.gradle/6.5/javaCompile/jarAnalysis.bin differ diff --git a/MLOps/serving_patterns/edge_ai_pattern/.gradle/6.5/javaCompile/javaCompile.lock b/MLOps/serving_patterns/edge_ai_pattern/.gradle/6.5/javaCompile/javaCompile.lock new file mode 100644 index 0000000..c9c8ecf Binary files /dev/null and b/MLOps/serving_patterns/edge_ai_pattern/.gradle/6.5/javaCompile/javaCompile.lock differ diff --git a/MLOps/serving_patterns/edge_ai_pattern/.gradle/6.5/javaCompile/taskHistory.bin b/MLOps/serving_patterns/edge_ai_pattern/.gradle/6.5/javaCompile/taskHistory.bin new file mode 100644 index 0000000..352b5c6 Binary files /dev/null and b/MLOps/serving_patterns/edge_ai_pattern/.gradle/6.5/javaCompile/taskHistory.bin differ diff --git a/MLOps/serving_patterns/edge_ai_pattern/.gradle/buildOutputCleanup/buildOutputCleanup.lock 
b/MLOps/serving_patterns/edge_ai_pattern/.gradle/buildOutputCleanup/buildOutputCleanup.lock new file mode 100644 index 0000000..acc4237 Binary files /dev/null and b/MLOps/serving_patterns/edge_ai_pattern/.gradle/buildOutputCleanup/buildOutputCleanup.lock differ diff --git a/MLOps/serving_patterns/edge_ai_pattern/.gradle/buildOutputCleanup/cache.properties b/MLOps/serving_patterns/edge_ai_pattern/.gradle/buildOutputCleanup/cache.properties new file mode 100644 index 0000000..27ca4d7 --- /dev/null +++ b/MLOps/serving_patterns/edge_ai_pattern/.gradle/buildOutputCleanup/cache.properties @@ -0,0 +1,2 @@ +#Thu Feb 11 12:42:41 JST 2021 +gradle.version=6.5 diff --git a/MLOps/serving_patterns/edge_ai_pattern/.gradle/buildOutputCleanup/outputFiles.bin b/MLOps/serving_patterns/edge_ai_pattern/.gradle/buildOutputCleanup/outputFiles.bin new file mode 100644 index 0000000..53d6284 Binary files /dev/null and b/MLOps/serving_patterns/edge_ai_pattern/.gradle/buildOutputCleanup/outputFiles.bin differ diff --git a/MLOps/serving_patterns/edge_ai_pattern/.gradle/checksums/checksums.lock b/MLOps/serving_patterns/edge_ai_pattern/.gradle/checksums/checksums.lock new file mode 100644 index 0000000..e1d14df Binary files /dev/null and b/MLOps/serving_patterns/edge_ai_pattern/.gradle/checksums/checksums.lock differ diff --git a/MLOps/serving_patterns/edge_ai_pattern/.gradle/checksums/md5-checksums.bin b/MLOps/serving_patterns/edge_ai_pattern/.gradle/checksums/md5-checksums.bin new file mode 100644 index 0000000..a1643de Binary files /dev/null and b/MLOps/serving_patterns/edge_ai_pattern/.gradle/checksums/md5-checksums.bin differ diff --git a/MLOps/serving_patterns/edge_ai_pattern/.gradle/checksums/sha1-checksums.bin b/MLOps/serving_patterns/edge_ai_pattern/.gradle/checksums/sha1-checksums.bin new file mode 100644 index 0000000..bf56b5b Binary files /dev/null and b/MLOps/serving_patterns/edge_ai_pattern/.gradle/checksums/sha1-checksums.bin differ diff --git 
a/MLOps/serving_patterns/edge_ai_pattern/.gradle/vcs-1/gc.properties b/MLOps/serving_patterns/edge_ai_pattern/.gradle/vcs-1/gc.properties new file mode 100644 index 0000000..e69de29 diff --git a/MLOps/serving_patterns/edge_ai_pattern/.idea/.gitignore b/MLOps/serving_patterns/edge_ai_pattern/.idea/.gitignore new file mode 100644 index 0000000..26d3352 --- /dev/null +++ b/MLOps/serving_patterns/edge_ai_pattern/.idea/.gitignore @@ -0,0 +1,3 @@ +# Default ignored files +/shelf/ +/workspace.xml diff --git a/MLOps/serving_patterns/edge_ai_pattern/.idea/.name b/MLOps/serving_patterns/edge_ai_pattern/.idea/.name new file mode 100644 index 0000000..d64a2e6 --- /dev/null +++ b/MLOps/serving_patterns/edge_ai_pattern/.idea/.name @@ -0,0 +1 @@ +TFLitePyTorch \ No newline at end of file diff --git a/MLOps/serving_patterns/edge_ai_pattern/.idea/caches/build_file_checksums.ser b/MLOps/serving_patterns/edge_ai_pattern/.idea/caches/build_file_checksums.ser new file mode 100644 index 0000000..e346c26 Binary files /dev/null and b/MLOps/serving_patterns/edge_ai_pattern/.idea/caches/build_file_checksums.ser differ diff --git a/MLOps/serving_patterns/edge_ai_pattern/.idea/compiler.xml b/MLOps/serving_patterns/edge_ai_pattern/.idea/compiler.xml new file mode 100644 index 0000000..61a9130 --- /dev/null +++ b/MLOps/serving_patterns/edge_ai_pattern/.idea/compiler.xml @@ -0,0 +1,6 @@ + + + + + + \ No newline at end of file diff --git a/MLOps/serving_patterns/edge_ai_pattern/.idea/gradle.xml b/MLOps/serving_patterns/edge_ai_pattern/.idea/gradle.xml new file mode 100644 index 0000000..acf0820 --- /dev/null +++ b/MLOps/serving_patterns/edge_ai_pattern/.idea/gradle.xml @@ -0,0 +1,24 @@ + + + + + + + \ No newline at end of file diff --git a/MLOps/serving_patterns/edge_ai_pattern/.idea/jarRepositories.xml b/MLOps/serving_patterns/edge_ai_pattern/.idea/jarRepositories.xml new file mode 100644 index 0000000..a5f05cd --- /dev/null +++ b/MLOps/serving_patterns/edge_ai_pattern/.idea/jarRepositories.xml 
@@ -0,0 +1,25 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git a/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__androidx_activity_activity_1_0_0_aar.xml b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__androidx_activity_activity_1_0_0_aar.xml new file mode 100644 index 0000000..127ef7c --- /dev/null +++ b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__androidx_activity_activity_1_0_0_aar.xml @@ -0,0 +1,14 @@ + + + + + + + + + + + + + + \ No newline at end of file diff --git a/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__androidx_annotation_annotation_1_1_0.xml b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__androidx_annotation_annotation_1_1_0.xml new file mode 100644 index 0000000..b2158ac --- /dev/null +++ b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__androidx_annotation_annotation_1_1_0.xml @@ -0,0 +1,13 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git a/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__androidx_appcompat_appcompat_1_2_0_aar.xml b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__androidx_appcompat_appcompat_1_2_0_aar.xml new file mode 100644 index 0000000..db947a3 --- /dev/null +++ b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__androidx_appcompat_appcompat_1_2_0_aar.xml @@ -0,0 +1,16 @@ + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__androidx_appcompat_appcompat_resources_1_2_0_aar.xml b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__androidx_appcompat_appcompat_resources_1_2_0_aar.xml new file mode 100644 index 0000000..22040d5 --- /dev/null +++ b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__androidx_appcompat_appcompat_resources_1_2_0_aar.xml @@ -0,0 +1,13 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git 
a/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__androidx_arch_core_core_common_2_1_0.xml b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__androidx_arch_core_core_common_2_1_0.xml new file mode 100644 index 0000000..2208415 --- /dev/null +++ b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__androidx_arch_core_core_common_2_1_0.xml @@ -0,0 +1,11 @@ + + + + + + + + + + + \ No newline at end of file diff --git a/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__androidx_arch_core_core_runtime_2_0_0_aar.xml b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__androidx_arch_core_core_runtime_2_0_0_aar.xml new file mode 100644 index 0000000..7b529e5 --- /dev/null +++ b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__androidx_arch_core_core_runtime_2_0_0_aar.xml @@ -0,0 +1,13 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git a/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__androidx_camera_camera_camera2_1_0_0_alpha05_aar.xml b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__androidx_camera_camera_camera2_1_0_0_alpha05_aar.xml new file mode 100644 index 0000000..d9710f3 --- /dev/null +++ b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__androidx_camera_camera_camera2_1_0_0_alpha05_aar.xml @@ -0,0 +1,16 @@ + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__androidx_camera_camera_core_1_0_0_alpha05_aar.xml b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__androidx_camera_camera_core_1_0_0_alpha05_aar.xml new file mode 100644 index 0000000..601d954 --- /dev/null +++ b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__androidx_camera_camera_core_1_0_0_alpha05_aar.xml @@ -0,0 +1,16 @@ + + + + + + + + + + + + + + + + \ No newline at end of file diff --git 
a/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__androidx_collection_collection_1_1_0.xml b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__androidx_collection_collection_1_1_0.xml new file mode 100644 index 0000000..eafc05e --- /dev/null +++ b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__androidx_collection_collection_1_1_0.xml @@ -0,0 +1,11 @@ + + + + + + + + + + + \ No newline at end of file diff --git a/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__androidx_concurrent_concurrent_futures_1_0_0_alpha03.xml b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__androidx_concurrent_concurrent_futures_1_0_0_alpha03.xml new file mode 100644 index 0000000..395799a --- /dev/null +++ b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__androidx_concurrent_concurrent_futures_1_0_0_alpha03.xml @@ -0,0 +1,11 @@ + + + + + + + + + + + \ No newline at end of file diff --git a/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__androidx_constraintlayout_constraintlayout_2_0_4_aar.xml b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__androidx_constraintlayout_constraintlayout_2_0_4_aar.xml new file mode 100644 index 0000000..db28186 --- /dev/null +++ b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__androidx_constraintlayout_constraintlayout_2_0_4_aar.xml @@ -0,0 +1,11 @@ + + + + + + + + + + + \ No newline at end of file diff --git a/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__androidx_constraintlayout_constraintlayout_solver_2_0_4.xml b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__androidx_constraintlayout_constraintlayout_solver_2_0_4.xml new file mode 100644 index 0000000..cba1dae --- /dev/null +++ b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__androidx_constraintlayout_constraintlayout_solver_2_0_4.xml @@ -0,0 +1,9 @@ + + + + + + + + + \ No newline at end of file diff --git 
a/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__androidx_core_core_1_3_2_aar.xml b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__androidx_core_core_1_3_2_aar.xml new file mode 100644 index 0000000..7235ac7 --- /dev/null +++ b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__androidx_core_core_1_3_2_aar.xml @@ -0,0 +1,16 @@ + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__androidx_core_core_ktx_1_3_2_aar.xml b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__androidx_core_core_ktx_1_3_2_aar.xml new file mode 100644 index 0000000..461a1cf --- /dev/null +++ b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__androidx_core_core_ktx_1_3_2_aar.xml @@ -0,0 +1,14 @@ + + + + + + + + + + + + + + \ No newline at end of file diff --git a/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__androidx_cursoradapter_cursoradapter_1_0_0_aar.xml b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__androidx_cursoradapter_cursoradapter_1_0_0_aar.xml new file mode 100644 index 0000000..1c786da --- /dev/null +++ b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__androidx_cursoradapter_cursoradapter_1_0_0_aar.xml @@ -0,0 +1,14 @@ + + + + + + + + + + + + + + \ No newline at end of file diff --git a/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__androidx_customview_customview_1_0_0_aar.xml b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__androidx_customview_customview_1_0_0_aar.xml new file mode 100644 index 0000000..a81b8f8 --- /dev/null +++ b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__androidx_customview_customview_1_0_0_aar.xml @@ -0,0 +1,14 @@ + + + + + + + + + + + + + + \ No newline at end of file diff --git a/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__androidx_drawerlayout_drawerlayout_1_0_0_aar.xml 
b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__androidx_drawerlayout_drawerlayout_1_0_0_aar.xml new file mode 100644 index 0000000..10fcac2 --- /dev/null +++ b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__androidx_drawerlayout_drawerlayout_1_0_0_aar.xml @@ -0,0 +1,17 @@ + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__androidx_exifinterface_exifinterface_1_0_0_aar.xml b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__androidx_exifinterface_exifinterface_1_0_0_aar.xml new file mode 100644 index 0000000..52d8ae5 --- /dev/null +++ b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__androidx_exifinterface_exifinterface_1_0_0_aar.xml @@ -0,0 +1,16 @@ + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__androidx_fragment_fragment_1_1_0_aar.xml b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__androidx_fragment_fragment_1_1_0_aar.xml new file mode 100644 index 0000000..7b7f49c --- /dev/null +++ b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__androidx_fragment_fragment_1_1_0_aar.xml @@ -0,0 +1,17 @@ + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__androidx_interpolator_interpolator_1_0_0_aar.xml b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__androidx_interpolator_interpolator_1_0_0_aar.xml new file mode 100644 index 0000000..02d9746 --- /dev/null +++ b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__androidx_interpolator_interpolator_1_0_0_aar.xml @@ -0,0 +1,14 @@ + + + + + + + + + + + + + + \ No newline at end of file diff --git a/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__androidx_lifecycle_lifecycle_common_2_1_0.xml 
b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__androidx_lifecycle_lifecycle_common_2_1_0.xml new file mode 100644 index 0000000..9354d44 --- /dev/null +++ b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__androidx_lifecycle_lifecycle_common_2_1_0.xml @@ -0,0 +1,11 @@ + + + + + + + + + + + \ No newline at end of file diff --git a/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__androidx_lifecycle_lifecycle_livedata_2_0_0_aar.xml b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__androidx_lifecycle_lifecycle_livedata_2_0_0_aar.xml new file mode 100644 index 0000000..db63952 --- /dev/null +++ b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__androidx_lifecycle_lifecycle_livedata_2_0_0_aar.xml @@ -0,0 +1,13 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git a/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__androidx_lifecycle_lifecycle_livedata_core_2_0_0_aar.xml b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__androidx_lifecycle_lifecycle_livedata_core_2_0_0_aar.xml new file mode 100644 index 0000000..a532866 --- /dev/null +++ b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__androidx_lifecycle_lifecycle_livedata_core_2_0_0_aar.xml @@ -0,0 +1,13 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git a/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__androidx_lifecycle_lifecycle_runtime_2_1_0_aar.xml b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__androidx_lifecycle_lifecycle_runtime_2_1_0_aar.xml new file mode 100644 index 0000000..8b2caf4 --- /dev/null +++ b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__androidx_lifecycle_lifecycle_runtime_2_1_0_aar.xml @@ -0,0 +1,14 @@ + + + + + + + + + + + + + + \ No newline at end of file diff --git a/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__androidx_lifecycle_lifecycle_viewmodel_2_1_0_aar.xml 
b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__androidx_lifecycle_lifecycle_viewmodel_2_1_0_aar.xml new file mode 100644 index 0000000..28236b3 --- /dev/null +++ b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__androidx_lifecycle_lifecycle_viewmodel_2_1_0_aar.xml @@ -0,0 +1,14 @@ + + + + + + + + + + + + + + \ No newline at end of file diff --git a/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__androidx_loader_loader_1_0_0_aar.xml b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__androidx_loader_loader_1_0_0_aar.xml new file mode 100644 index 0000000..74d6443 --- /dev/null +++ b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__androidx_loader_loader_1_0_0_aar.xml @@ -0,0 +1,14 @@ + + + + + + + + + + + + + + \ No newline at end of file diff --git a/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__androidx_savedstate_savedstate_1_0_0_aar.xml b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__androidx_savedstate_savedstate_1_0_0_aar.xml new file mode 100644 index 0000000..478561c --- /dev/null +++ b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__androidx_savedstate_savedstate_1_0_0_aar.xml @@ -0,0 +1,14 @@ + + + + + + + + + + + + + + \ No newline at end of file diff --git a/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__androidx_test_core_1_3_0_aar.xml b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__androidx_test_core_1_3_0_aar.xml new file mode 100644 index 0000000..17c0885 --- /dev/null +++ b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__androidx_test_core_1_3_0_aar.xml @@ -0,0 +1,16 @@ + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__androidx_test_espresso_espresso_core_3_3_0_aar.xml b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__androidx_test_espresso_espresso_core_3_3_0_aar.xml new file mode 
100644 index 0000000..ab3021c --- /dev/null +++ b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__androidx_test_espresso_espresso_core_3_3_0_aar.xml @@ -0,0 +1,16 @@ + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__androidx_test_espresso_espresso_idling_resource_3_3_0_aar.xml b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__androidx_test_espresso_espresso_idling_resource_3_3_0_aar.xml new file mode 100644 index 0000000..ea72292 --- /dev/null +++ b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__androidx_test_espresso_espresso_idling_resource_3_3_0_aar.xml @@ -0,0 +1,16 @@ + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__androidx_test_ext_junit_1_1_2_aar.xml b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__androidx_test_ext_junit_1_1_2_aar.xml new file mode 100644 index 0000000..5e2943c --- /dev/null +++ b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__androidx_test_ext_junit_1_1_2_aar.xml @@ -0,0 +1,16 @@ + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__androidx_test_monitor_1_3_0_aar.xml b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__androidx_test_monitor_1_3_0_aar.xml new file mode 100644 index 0000000..f2b38c8 --- /dev/null +++ b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__androidx_test_monitor_1_3_0_aar.xml @@ -0,0 +1,16 @@ + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__androidx_test_runner_1_3_0_aar.xml b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__androidx_test_runner_1_3_0_aar.xml new file mode 100644 index 0000000..243fdea --- /dev/null +++ 
b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__androidx_test_runner_1_3_0_aar.xml @@ -0,0 +1,16 @@ + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__androidx_vectordrawable_vectordrawable_1_1_0_aar.xml b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__androidx_vectordrawable_vectordrawable_1_1_0_aar.xml new file mode 100644 index 0000000..5de13b1 --- /dev/null +++ b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__androidx_vectordrawable_vectordrawable_1_1_0_aar.xml @@ -0,0 +1,14 @@ + + + + + + + + + + + + + + \ No newline at end of file diff --git a/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__androidx_vectordrawable_vectordrawable_animated_1_1_0_aar.xml b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__androidx_vectordrawable_vectordrawable_animated_1_1_0_aar.xml new file mode 100644 index 0000000..4dc90c9 --- /dev/null +++ b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__androidx_vectordrawable_vectordrawable_animated_1_1_0_aar.xml @@ -0,0 +1,14 @@ + + + + + + + + + + + + + + \ No newline at end of file diff --git a/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__androidx_versionedparcelable_versionedparcelable_1_1_0_aar.xml b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__androidx_versionedparcelable_versionedparcelable_1_1_0_aar.xml new file mode 100644 index 0000000..37288b9 --- /dev/null +++ b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__androidx_versionedparcelable_versionedparcelable_1_1_0_aar.xml @@ -0,0 +1,14 @@ + + + + + + + + + + + + + + \ No newline at end of file diff --git a/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__androidx_viewpager_viewpager_1_0_0_aar.xml b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__androidx_viewpager_viewpager_1_0_0_aar.xml new file mode 100644 index 
0000000..cf04fa9 --- /dev/null +++ b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__androidx_viewpager_viewpager_1_0_0_aar.xml @@ -0,0 +1,14 @@ + + + + + + + + + + + + + + \ No newline at end of file diff --git a/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__com_google_auto_value_auto_value_annotations_1_6_3.xml b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__com_google_auto_value_auto_value_annotations_1_6_3.xml new file mode 100644 index 0000000..9f84cac --- /dev/null +++ b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__com_google_auto_value_auto_value_annotations_1_6_3.xml @@ -0,0 +1,13 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git a/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__com_google_code_findbugs_jsr305_2_0_1.xml b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__com_google_code_findbugs_jsr305_2_0_1.xml new file mode 100644 index 0000000..2b834ea --- /dev/null +++ b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__com_google_code_findbugs_jsr305_2_0_1.xml @@ -0,0 +1,9 @@ + + + + + + + + + \ No newline at end of file diff --git a/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__com_google_guava_listenablefuture_1_0.xml b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__com_google_guava_listenablefuture_1_0.xml new file mode 100644 index 0000000..09da23b --- /dev/null +++ b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__com_google_guava_listenablefuture_1_0.xml @@ -0,0 +1,13 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git a/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__com_squareup_javawriter_2_1_1.xml b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__com_squareup_javawriter_2_1_1.xml new file mode 100644 index 0000000..662b001 --- /dev/null +++ 
b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__com_squareup_javawriter_2_1_1.xml @@ -0,0 +1,13 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git a/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__javax_inject_javax_inject_1.xml b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__javax_inject_javax_inject_1.xml new file mode 100644 index 0000000..62012ea --- /dev/null +++ b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__javax_inject_javax_inject_1.xml @@ -0,0 +1,13 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git a/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__junit_junit_4_12.xml b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__junit_junit_4_12.xml new file mode 100644 index 0000000..6c078d6 --- /dev/null +++ b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__junit_junit_4_12.xml @@ -0,0 +1,13 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git a/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__junit_junit_4_13_1.xml b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__junit_junit_4_13_1.xml new file mode 100644 index 0000000..4405e64 --- /dev/null +++ b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__junit_junit_4_13_1.xml @@ -0,0 +1,13 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git a/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__org_checkerframework_checker_qual_2_5_8.xml b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__org_checkerframework_checker_qual_2_5_8.xml new file mode 100644 index 0000000..8584336 --- /dev/null +++ b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__org_checkerframework_checker_qual_2_5_8.xml @@ -0,0 +1,13 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git a/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__org_hamcrest_hamcrest_core_1_3.xml 
b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__org_hamcrest_hamcrest_core_1_3.xml new file mode 100644 index 0000000..09cf23d --- /dev/null +++ b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__org_hamcrest_hamcrest_core_1_3.xml @@ -0,0 +1,13 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git a/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__org_hamcrest_hamcrest_integration_1_3.xml b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__org_hamcrest_hamcrest_integration_1_3.xml new file mode 100644 index 0000000..1a77dd8 --- /dev/null +++ b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__org_hamcrest_hamcrest_integration_1_3.xml @@ -0,0 +1,13 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git a/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__org_hamcrest_hamcrest_library_1_3.xml b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__org_hamcrest_hamcrest_library_1_3.xml new file mode 100644 index 0000000..3d45e8e --- /dev/null +++ b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__org_hamcrest_hamcrest_library_1_3.xml @@ -0,0 +1,13 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git a/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__org_jetbrains_annotations_13_0.xml b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__org_jetbrains_annotations_13_0.xml new file mode 100644 index 0000000..1fa0fa9 --- /dev/null +++ b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__org_jetbrains_annotations_13_0.xml @@ -0,0 +1,13 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git a/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__org_jetbrains_kotlin_kotlin_android_extensions_runtime_1_4_20.xml b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__org_jetbrains_kotlin_kotlin_android_extensions_runtime_1_4_20.xml new file mode 100644 index 
0000000..51db252 --- /dev/null +++ b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__org_jetbrains_kotlin_kotlin_android_extensions_runtime_1_4_20.xml @@ -0,0 +1,13 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git a/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__org_jetbrains_kotlin_kotlin_stdlib_1_4_20.xml b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__org_jetbrains_kotlin_kotlin_stdlib_1_4_20.xml new file mode 100644 index 0000000..ab42e17 --- /dev/null +++ b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__org_jetbrains_kotlin_kotlin_stdlib_1_4_20.xml @@ -0,0 +1,13 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git a/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__org_jetbrains_kotlin_kotlin_stdlib_common_1_4_20.xml b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__org_jetbrains_kotlin_kotlin_stdlib_common_1_4_20.xml new file mode 100644 index 0000000..2b85444 --- /dev/null +++ b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__org_jetbrains_kotlin_kotlin_stdlib_common_1_4_20.xml @@ -0,0 +1,13 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git a/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__org_jetbrains_kotlin_kotlin_stdlib_jdk7_1_4_20.xml b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__org_jetbrains_kotlin_kotlin_stdlib_jdk7_1_4_20.xml new file mode 100644 index 0000000..652c287 --- /dev/null +++ b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__org_jetbrains_kotlin_kotlin_stdlib_jdk7_1_4_20.xml @@ -0,0 +1,13 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git a/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__org_pytorch_pytorch_android_1_4_0_aar.xml b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__org_pytorch_pytorch_android_1_4_0_aar.xml new file mode 100644 index 0000000..b5ccdc8 --- /dev/null +++ 
b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__org_pytorch_pytorch_android_1_4_0_aar.xml @@ -0,0 +1,15 @@ + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__org_pytorch_pytorch_android_fbjni_1_4_0_aar.xml b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__org_pytorch_pytorch_android_fbjni_1_4_0_aar.xml new file mode 100644 index 0000000..0416583 --- /dev/null +++ b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__org_pytorch_pytorch_android_fbjni_1_4_0_aar.xml @@ -0,0 +1,16 @@ + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__org_pytorch_pytorch_android_torchvision_1_4_0_aar.xml b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__org_pytorch_pytorch_android_torchvision_1_4_0_aar.xml new file mode 100644 index 0000000..e928aaa --- /dev/null +++ b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__org_pytorch_pytorch_android_torchvision_1_4_0_aar.xml @@ -0,0 +1,15 @@ + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__org_tensorflow_tensorflow_lite_2_4_0_aar.xml b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__org_tensorflow_tensorflow_lite_2_4_0_aar.xml new file mode 100644 index 0000000..19c758b --- /dev/null +++ b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__org_tensorflow_tensorflow_lite_2_4_0_aar.xml @@ -0,0 +1,12 @@ + + + + + + + + + + + + \ No newline at end of file diff --git a/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__org_tensorflow_tensorflow_lite_gpu_2_4_0_aar.xml b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__org_tensorflow_tensorflow_lite_gpu_2_4_0_aar.xml new file mode 100644 index 0000000..9cfb11e --- /dev/null +++ 
b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__org_tensorflow_tensorflow_lite_gpu_2_4_0_aar.xml @@ -0,0 +1,12 @@ + + + + + + + + + + + + \ No newline at end of file diff --git a/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__org_tensorflow_tensorflow_lite_support_0_0_0_nightly_aar.xml b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__org_tensorflow_tensorflow_lite_support_0_0_0_nightly_aar.xml new file mode 100644 index 0000000..d1f9aad --- /dev/null +++ b/MLOps/serving_patterns/edge_ai_pattern/.idea/libraries/Gradle__org_tensorflow_tensorflow_lite_support_0_0_0_nightly_aar.xml @@ -0,0 +1,12 @@ + + + + + + + + + + + + \ No newline at end of file diff --git a/MLOps/serving_patterns/edge_ai_pattern/.idea/misc.xml b/MLOps/serving_patterns/edge_ai_pattern/.idea/misc.xml new file mode 100644 index 0000000..d5d35ec --- /dev/null +++ b/MLOps/serving_patterns/edge_ai_pattern/.idea/misc.xml @@ -0,0 +1,9 @@ + + + + + + + + \ No newline at end of file diff --git a/MLOps/serving_patterns/edge_ai_pattern/.idea/modules.xml b/MLOps/serving_patterns/edge_ai_pattern/.idea/modules.xml new file mode 100644 index 0000000..d839fd3 --- /dev/null +++ b/MLOps/serving_patterns/edge_ai_pattern/.idea/modules.xml @@ -0,0 +1,9 @@ + + + + + + + + + \ No newline at end of file diff --git a/MLOps/serving_patterns/edge_ai_pattern/.idea/modules/TFLitePyTorch.iml b/MLOps/serving_patterns/edge_ai_pattern/.idea/modules/TFLitePyTorch.iml new file mode 100644 index 0000000..d37822a --- /dev/null +++ b/MLOps/serving_patterns/edge_ai_pattern/.idea/modules/TFLitePyTorch.iml @@ -0,0 +1,20 @@ + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/MLOps/serving_patterns/edge_ai_pattern/.idea/modules/app/TFLitePyTorch.app.iml b/MLOps/serving_patterns/edge_ai_pattern/.idea/modules/app/TFLitePyTorch.app.iml new file mode 100644 index 0000000..2ec283a --- /dev/null +++ 
b/MLOps/serving_patterns/edge_ai_pattern/.idea/modules/app/TFLitePyTorch.app.iml @@ -0,0 +1,145 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/MLOps/serving_patterns/edge_ai_pattern/.idea/vcs.xml b/MLOps/serving_patterns/edge_ai_pattern/.idea/vcs.xml new file mode 100644 index 0000000..b2bdec2 --- /dev/null +++ b/MLOps/serving_patterns/edge_ai_pattern/.idea/vcs.xml @@ -0,0 +1,6 @@ + + + + + + \ No newline at end of file diff --git a/MLOps/serving_patterns/edge_ai_pattern/LICENSE b/MLOps/serving_patterns/edge_ai_pattern/LICENSE new file mode 100644 index 0000000..7550992 --- /dev/null +++ b/MLOps/serving_patterns/edge_ai_pattern/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2020 shibuiwilliam + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/MLOps/serving_patterns/edge_ai_pattern/README.md b/MLOps/serving_patterns/edge_ai_pattern/README.md new file mode 100644 index 0000000..f905d85 --- /dev/null +++ b/MLOps/serving_patterns/edge_ai_pattern/README.md @@ -0,0 +1,41 @@ +# Edge AI 패턴 - TFLite / PyTorch Mobile + +[![](http://img.youtube.com/vi/3XOwdP7Mv2Y/0.jpg)](http://www.youtube.com/watch?v=3XOwdP7Mv2Y) + +## 목적 + +TensorflowLite 또는 Pytorch Mobile 로 이미지 인식 모델을 사용해, Android 에서 카메라에 찍힌 피사체를 분류하는 애플리케이션을 동작합니다. + +## 전제 + +- Android +- Android 단말 + +Android Studio 는 아래를 참조해 주십시오. + +- [Android Studio](https://developer.android.com/studio/install) + +# 사용법 + +1. Android Studio 를 기동하고, 본 디렉토리를 읽어옵니다. + +Android Studio 기동화면 +![img](./img/start.png) + +디렉토리 선택 + +![img](./img/files.png) + +2. 애플리케이션 인스톨 + +Android Studio 에서 애플리케이션을 선택하면 아래와 같은 화면이 나타납니다. + +![img](./img/install.png) + +Android 스마트폰을 단말에 접속합니다. +상부의 에뮬레이터 환경에서 Android 스마트폰이 인식됩니다. +플레이 버튼(▶)을 눌러, 애플리케이션을 Android 에 인스톨합니다. + +3. 애플리케이션 사용하기 + +![img](./img/edge_ai.png) diff --git a/MLOps/serving_patterns/edge_ai_pattern/app/.gitignore b/MLOps/serving_patterns/edge_ai_pattern/app/.gitignore new file mode 100644 index 0000000..796b96d --- /dev/null +++ b/MLOps/serving_patterns/edge_ai_pattern/app/.gitignore @@ -0,0 +1 @@ +/build diff --git a/MLOps/serving_patterns/edge_ai_pattern/app/build.gradle b/MLOps/serving_patterns/edge_ai_pattern/app/build.gradle new file mode 100644 index 0000000..e7705d6 --- /dev/null +++ b/MLOps/serving_patterns/edge_ai_pattern/app/build.gradle @@ -0,0 +1,61 @@ +apply plugin: 'com.android.application' +apply plugin: 'kotlin-android' +apply plugin: 'kotlin-android-extensions' + +android { + compileSdkVersion 29 + buildToolsVersion "29.0.2" + + defaultConfig { + applicationId "com.shibuiwilliam.tflitepytorch" + minSdkVersion 25 + targetSdkVersion 29 + versionCode 1 + versionName "1.0" + + testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner" + } + + buildTypes { + release { + minifyEnabled false 
+ proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro' + } + } + aaptOptions { + noCompress "tflite" + } + compileOptions { + sourceCompatibility = '1.8' + targetCompatibility = '1.8' + } + lintOptions { + abortOnError false + } +} + + +dependencies { + implementation fileTree(dir: 'libs', include: ['*.jar']) + implementation "org.jetbrains.kotlin:kotlin-stdlib-jdk7:$kotlin_version" + implementation 'androidx.appcompat:appcompat:1.2.0' + implementation 'androidx.core:core-ktx:1.3.2' + testImplementation 'junit:junit:4.13.1' + androidTestImplementation 'androidx.test.ext:junit:1.1.2' + androidTestImplementation 'androidx.test.espresso:espresso-core:3.3.0' + implementation 'androidx.constraintlayout:constraintlayout:2.0.4' + + def camerax_version = "1.0.0-alpha05" + implementation "androidx.camera:camera-core:$camerax_version" + implementation "androidx.camera:camera-camera2:$camerax_version" + + // Build off of nightly TensorFlow Lite + implementation 'org.tensorflow:tensorflow-lite:2.4.0' + implementation 'org.tensorflow:tensorflow-lite-gpu:2.4.0' + implementation 'org.tensorflow:tensorflow-lite-support:0.0.0-nightly' + // Use local TensorFlow library + // implementation 'org.tensorflow:tensorflow-lite-local:0.0.0' + + implementation 'org.pytorch:pytorch_android:1.4.0' + implementation 'org.pytorch:pytorch_android_torchvision:1.4.0' +} diff --git a/MLOps/serving_patterns/edge_ai_pattern/app/download.gradle b/MLOps/serving_patterns/edge_ai_pattern/app/download.gradle new file mode 100644 index 0000000..f58bc7c --- /dev/null +++ b/MLOps/serving_patterns/edge_ai_pattern/app/download.gradle @@ -0,0 +1,103 @@ +def targetFolder = "src/main/assets" +def modelFloatDownloadUrl = "http://download.tensorflow.org/models/mobilenet_v1_2018_02_22/mobilenet_v1_1.0_224.tgz" +def modelQuantDownloadUrl = "http://download.tensorflow.org/models/mobilenet_v1_2018_08_02/mobilenet_v1_1.0_224_quant.tgz" +def modelEfficientNetFloatDownloadUrl = 
"http://download.tensorflow.org/models/tflite/efficientnet-lite/efficientnet-lite0-fp32_2020_03_03.zip" +def modelEfficientNetQuantDownloadUrl = "http://download.tensorflow.org/models/tflite/efficientnet-lite/efficientnet-lite0-int8_2020_03_03.zip" +def localCacheFloat = "build/intermediates/mobilenet_v1_1.0_224.tgz" +def localCacheQuant = "build/intermediates/mobilenet_v1_1.0_224_quant.tgz" +def localCacheEfficientNetFloat = "build/intermediates/efficientnet-lite0-fp32_2020_03_03.zip" +def localCacheEfficientNetQuant = "build/intermediates/efficientnet-lite0-int8_2020_03_03.zip" + + +task downloadModelFloat(type: DownloadUrlTask) { + doFirst { + println "Downloading ${modelFloatDownloadUrl}" + } + sourceUrl = "${modelFloatDownloadUrl}" + target = file("${localCacheFloat}") +} + +task downloadModelQuant(type: DownloadUrlTask) { + doFirst { + println "Downloading ${modelQuantDownloadUrl}" + } + sourceUrl = "${modelQuantDownloadUrl}" + target = file("${localCacheQuant}") +} + +task downloadEfficientNetFloat(type: DownloadUrlTask) { + doFirst { + println "Downloading ${modelEfficientNetFloatDownloadUrl}" + } + sourceUrl = "${modelEfficientNetFloatDownloadUrl}" + target = file("${localCacheEfficientNetFloat}") +} + +task downloadEfficientNetQuant(type: DownloadUrlTask) { + doFirst { + println "Downloading ${modelEfficientNetQuantDownloadUrl}" + } + sourceUrl = "${modelEfficientNetQuantDownloadUrl}" + target = file("${localCacheEfficientNetQuant}") +} + +task unzipModelFloat(type: Copy, dependsOn: 'downloadModelFloat') { + doFirst { + println "Unzipping ${localCacheFloat}" + } + from tarTree("${localCacheFloat}") + into "${targetFolder}" +} + +task unzipModelQuant(type: Copy, dependsOn: 'downloadModelQuant') { + doFirst { + println "Unzipping ${localCacheQuant}" + } + from tarTree("${localCacheQuant}") + into "${targetFolder}" +} + +task unzipModelEfficientNetFloat(type: Copy, dependsOn: 'downloadEfficientNetFloat') { + doFirst { + println "Unzipping 
${localCacheEfficientNetFloat}" + } + from zipTree("${localCacheEfficientNetFloat}") + into "${targetFolder}" +} + +task unzipModelEfficientNetQuant(type: Copy, dependsOn: 'downloadEfficientNetQuant') { + doFirst { + println "Unzipping ${localCacheEfficientNetQuant}" + } + from zipTree("${localCacheEfficientNetQuant}") + into "${targetFolder}" +} + +task cleanUnusedFiles(type: Delete, dependsOn: ['unzipModelFloat', 'unzipModelQuant', 'unzipModelEfficientNetFloat', 'unzipModelEfficientNetQuant']) { + delete fileTree("${targetFolder}").matching { + include "*.pb" + include "*.ckpt.*" + include "*.pbtxt*" + include "*.quant_info.*" + include "*.meta" + } +} + +class DownloadUrlTask extends DefaultTask { + @Input + String sourceUrl + + @OutputFile + File target + + @TaskAction + void download() { + ant.get(src: sourceUrl, dest: target) + } +} + +preBuild.dependsOn unzipModelFloat +preBuild.dependsOn unzipModelQuant +preBuild.dependsOn unzipModelEfficientNetFloat +preBuild.dependsOn unzipModelEfficientNetQuant +preBuild.dependsOn cleanUnusedFiles \ No newline at end of file diff --git a/MLOps/serving_patterns/edge_ai_pattern/app/proguard-rules.pro b/MLOps/serving_patterns/edge_ai_pattern/app/proguard-rules.pro new file mode 100644 index 0000000..f1b4245 --- /dev/null +++ b/MLOps/serving_patterns/edge_ai_pattern/app/proguard-rules.pro @@ -0,0 +1,21 @@ +# Add project specific ProGuard rules here. +# You can control the set of applied configuration files using the +# proguardFiles setting in build.gradle. +# +# For more details, see +# http://developer.android.com/guide/developing/tools/proguard.html + +# If your project uses WebView with JS, uncomment the following +# and specify the fully qualified class name to the JavaScript interface +# class: +#-keepclassmembers class fqcn.of.javascript.interface.for.webview { +# public *; +#} + +# Uncomment this to preserve the line number information for +# debugging stack traces. 
+#-keepattributes SourceFile,LineNumberTable + +# If you keep the line number information, uncomment this to +# hide the original source file name. +#-renamesourcefileattribute SourceFile diff --git a/MLOps/serving_patterns/edge_ai_pattern/app/src/androidTest/java/com/shibuiwilliam/tflitepytorch/ExampleInstrumentedTest.kt b/MLOps/serving_patterns/edge_ai_pattern/app/src/androidTest/java/com/shibuiwilliam/tflitepytorch/ExampleInstrumentedTest.kt new file mode 100644 index 0000000..0ff9c91 --- /dev/null +++ b/MLOps/serving_patterns/edge_ai_pattern/app/src/androidTest/java/com/shibuiwilliam/tflitepytorch/ExampleInstrumentedTest.kt @@ -0,0 +1,24 @@ +package com.shibuiwilliam.tflitepytorch + +import androidx.test.platform.app.InstrumentationRegistry +import androidx.test.ext.junit.runners.AndroidJUnit4 + +import org.junit.Test +import org.junit.runner.RunWith + +import org.junit.Assert.* + +/** + * Instrumented test, which will execute on an Android device. + * + * See [testing documentation](http://d.android.com/tools/testing). + */ +@RunWith(AndroidJUnit4::class) +class ExampleInstrumentedTest { + @Test + fun useAppContext() { + // Context of the app under test. 
+ val appContext = InstrumentationRegistry.getInstrumentation().targetContext + assertEquals("com.shibuiwilliam.tflitepytorch", appContext.packageName) + } +} diff --git a/MLOps/serving_patterns/edge_ai_pattern/app/src/main/AndroidManifest.xml b/MLOps/serving_patterns/edge_ai_pattern/app/src/main/AndroidManifest.xml new file mode 100644 index 0000000..a74d165 --- /dev/null +++ b/MLOps/serving_patterns/edge_ai_pattern/app/src/main/AndroidManifest.xml @@ -0,0 +1,31 @@ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/MLOps/serving_patterns/edge_ai_pattern/app/src/main/assets/imagenet_labels.txt b/MLOps/serving_patterns/edge_ai_pattern/app/src/main/assets/imagenet_labels.txt new file mode 100644 index 0000000..fe81123 --- /dev/null +++ b/MLOps/serving_patterns/edge_ai_pattern/app/src/main/assets/imagenet_labels.txt @@ -0,0 +1,1001 @@ +background +tench +goldfish +great white shark +tiger shark +hammerhead +electric ray +stingray +cock +hen +ostrich +brambling +goldfinch +house finch +junco +indigo bunting +robin +bulbul +jay +magpie +chickadee +water ouzel +kite +bald eagle +vulture +great grey owl +European fire salamander +common newt +eft +spotted salamander +axolotl +bullfrog +tree frog +tailed frog +loggerhead +leatherback turtle +mud turtle +terrapin +box turtle +banded gecko +common iguana +American chameleon +whiptail +agama +frilled lizard +alligator lizard +Gila monster +green lizard +African chameleon +Komodo dragon +African crocodile +American alligator +triceratops +thunder snake +ringneck snake +hognose snake +green snake +king snake +garter snake +water snake +vine snake +night snake +boa constrictor +rock python +Indian cobra +green mamba +sea snake +horned viper +diamondback +sidewinder +trilobite +harvestman +scorpion +black and gold garden spider +barn spider +garden spider +black widow +tarantula +wolf spider +tick +centipede +black grouse +ptarmigan +ruffed grouse +prairie chicken +peacock +quail +partridge 
+African grey +macaw +sulphur-crested cockatoo +lorikeet +coucal +bee eater +hornbill +hummingbird +jacamar +toucan +drake +red-breasted merganser +goose +black swan +tusker +echidna +platypus +wallaby +koala +wombat +jellyfish +sea anemone +brain coral +flatworm +nematode +conch +snail +slug +sea slug +chiton +chambered nautilus +Dungeness crab +rock crab +fiddler crab +king crab +American lobster +spiny lobster +crayfish +hermit crab +isopod +white stork +black stork +spoonbill +flamingo +little blue heron +American egret +bittern +crane +limpkin +European gallinule +American coot +bustard +ruddy turnstone +red-backed sandpiper +redshank +dowitcher +oystercatcher +pelican +king penguin +albatross +grey whale +killer whale +dugong +sea lion +Chihuahua +Japanese spaniel +Maltese dog +Pekinese +Shih-Tzu +Blenheim spaniel +papillon +toy terrier +Rhodesian ridgeback +Afghan hound +basset +beagle +bloodhound +bluetick +black-and-tan coonhound +Walker hound +English foxhound +redbone +borzoi +Irish wolfhound +Italian greyhound +whippet +Ibizan hound +Norwegian elkhound +otterhound +Saluki +Scottish deerhound +Weimaraner +Staffordshire bullterrier +American Staffordshire terrier +Bedlington terrier +Border terrier +Kerry blue terrier +Irish terrier +Norfolk terrier +Norwich terrier +Yorkshire terrier +wire-haired fox terrier +Lakeland terrier +Sealyham terrier +Airedale +cairn +Australian terrier +Dandie Dinmont +Boston bull +miniature schnauzer +giant schnauzer +standard schnauzer +Scotch terrier +Tibetan terrier +silky terrier +soft-coated wheaten terrier +West Highland white terrier +Lhasa +flat-coated retriever +curly-coated retriever +golden retriever +Labrador retriever +Chesapeake Bay retriever +German short-haired pointer +vizsla +English setter +Irish setter +Gordon setter +Brittany spaniel +clumber +English springer +Welsh springer spaniel +cocker spaniel +Sussex spaniel +Irish water spaniel +kuvasz +schipperke +groenendael +malinois +briard +kelpie +komondor 
+Old English sheepdog +Shetland sheepdog +collie +Border collie +Bouvier des Flandres +Rottweiler +German shepherd +Doberman +miniature pinscher +Greater Swiss Mountain dog +Bernese mountain dog +Appenzeller +EntleBucher +boxer +bull mastiff +Tibetan mastiff +French bulldog +Great Dane +Saint Bernard +Eskimo dog +malamute +Siberian husky +dalmatian +affenpinscher +basenji +pug +Leonberg +Newfoundland +Great Pyrenees +Samoyed +Pomeranian +chow +keeshond +Brabancon griffon +Pembroke +Cardigan +toy poodle +miniature poodle +standard poodle +Mexican hairless +timber wolf +white wolf +red wolf +coyote +dingo +dhole +African hunting dog +hyena +red fox +kit fox +Arctic fox +grey fox +tabby +tiger cat +Persian cat +Siamese cat +Egyptian cat +cougar +lynx +leopard +snow leopard +jaguar +lion +tiger +cheetah +brown bear +American black bear +ice bear +sloth bear +mongoose +meerkat +tiger beetle +ladybug +ground beetle +long-horned beetle +leaf beetle +dung beetle +rhinoceros beetle +weevil +fly +bee +ant +grasshopper +cricket +walking stick +cockroach +mantis +cicada +leafhopper +lacewing +dragonfly +damselfly +admiral +ringlet +monarch +cabbage butterfly +sulphur butterfly +lycaenid +starfish +sea urchin +sea cucumber +wood rabbit +hare +Angora +hamster +porcupine +fox squirrel +marmot +beaver +guinea pig +sorrel +zebra +hog +wild boar +warthog +hippopotamus +ox +water buffalo +bison +ram +bighorn +ibex +hartebeest +impala +gazelle +Arabian camel +llama +weasel +mink +polecat +black-footed ferret +otter +skunk +badger +armadillo +three-toed sloth +orangutan +gorilla +chimpanzee +gibbon +siamang +guenon +patas +baboon +macaque +langur +colobus +proboscis monkey +marmoset +capuchin +howler monkey +titi +spider monkey +squirrel monkey +Madagascar cat +indri +Indian elephant +African elephant +lesser panda +giant panda +barracouta +eel +coho +rock beauty +anemone fish +sturgeon +gar +lionfish +puffer +abacus +abaya +academic gown +accordion +acoustic guitar +aircraft carrier 
+airliner +airship +altar +ambulance +amphibian +analog clock +apiary +apron +ashcan +assault rifle +backpack +bakery +balance beam +balloon +ballpoint +Band Aid +banjo +bannister +barbell +barber chair +barbershop +barn +barometer +barrel +barrow +baseball +basketball +bassinet +bassoon +bathing cap +bath towel +bathtub +beach wagon +beacon +beaker +bearskin +beer bottle +beer glass +bell cote +bib +bicycle-built-for-two +bikini +binder +binoculars +birdhouse +boathouse +bobsled +bolo tie +bonnet +bookcase +bookshop +bottlecap +bow +bow tie +brass +brassiere +breakwater +breastplate +broom +bucket +buckle +bulletproof vest +bullet train +butcher shop +cab +caldron +candle +cannon +canoe +can opener +cardigan +car mirror +carousel +carpenter's kit +carton +car wheel +cash machine +cassette +cassette player +castle +catamaran +CD player +cello +cellular telephone +chain +chainlink fence +chain mail +chain saw +chest +chiffonier +chime +china cabinet +Christmas stocking +church +cinema +cleaver +cliff dwelling +cloak +clog +cocktail shaker +coffee mug +coffeepot +coil +combination lock +computer keyboard +confectionery +container ship +convertible +corkscrew +cornet +cowboy boot +cowboy hat +cradle +crane +crash helmet +crate +crib +Crock Pot +croquet ball +crutch +cuirass +dam +desk +desktop computer +dial telephone +diaper +digital clock +digital watch +dining table +dishrag +dishwasher +disk brake +dock +dogsled +dome +doormat +drilling platform +drum +drumstick +dumbbell +Dutch oven +electric fan +electric guitar +electric locomotive +entertainment center +envelope +espresso maker +face powder +feather boa +file +fireboat +fire engine +fire screen +flagpole +flute +folding chair +football helmet +forklift +fountain +fountain pen +four-poster +freight car +French horn +frying pan +fur coat +garbage truck +gasmask +gas pump +goblet +go-kart +golf ball +golfcart +gondola +gong +gown +grand piano +greenhouse +grille +grocery store +guillotine +hair slide +hair spray 
+half track +hammer +hamper +hand blower +hand-held computer +handkerchief +hard disc +harmonica +harp +harvester +hatchet +holster +home theater +honeycomb +hook +hoopskirt +horizontal bar +horse cart +hourglass +iPod +iron +jack-o'-lantern +jean +jeep +jersey +jigsaw puzzle +jinrikisha +joystick +kimono +knee pad +knot +lab coat +ladle +lampshade +laptop +lawn mower +lens cap +letter opener +library +lifeboat +lighter +limousine +liner +lipstick +Loafer +lotion +loudspeaker +loupe +lumbermill +magnetic compass +mailbag +mailbox +maillot +maillot +manhole cover +maraca +marimba +mask +matchstick +maypole +maze +measuring cup +medicine chest +megalith +microphone +microwave +military uniform +milk can +minibus +miniskirt +minivan +missile +mitten +mixing bowl +mobile home +Model T +modem +monastery +monitor +moped +mortar +mortarboard +mosque +mosquito net +motor scooter +mountain bike +mountain tent +mouse +mousetrap +moving van +muzzle +nail +neck brace +necklace +nipple +notebook +obelisk +oboe +ocarina +odometer +oil filter +organ +oscilloscope +overskirt +oxcart +oxygen mask +packet +paddle +paddlewheel +padlock +paintbrush +pajama +palace +panpipe +paper towel +parachute +parallel bars +park bench +parking meter +passenger car +patio +pay-phone +pedestal +pencil box +pencil sharpener +perfume +Petri dish +photocopier +pick +pickelhaube +picket fence +pickup +pier +piggy bank +pill bottle +pillow +ping-pong ball +pinwheel +pirate +pitcher +plane +planetarium +plastic bag +plate rack +plow +plunger +Polaroid camera +pole +police van +poncho +pool table +pop bottle +pot +potter's wheel +power drill +prayer rug +printer +prison +projectile +projector +puck +punching bag +purse +quill +quilt +racer +racket +radiator +radio +radio telescope +rain barrel +recreational vehicle +reel +reflex camera +refrigerator +remote control +restaurant +revolver +rifle +rocking chair +rotisserie +rubber eraser +rugby ball +rule +running shoe +safe +safety pin +saltshaker +sandal 
+sarong +sax +scabbard +scale +school bus +schooner +scoreboard +screen +screw +screwdriver +seat belt +sewing machine +shield +shoe shop +shoji +shopping basket +shopping cart +shovel +shower cap +shower curtain +ski +ski mask +sleeping bag +slide rule +sliding door +slot +snorkel +snowmobile +snowplow +soap dispenser +soccer ball +sock +solar dish +sombrero +soup bowl +space bar +space heater +space shuttle +spatula +speedboat +spider web +spindle +sports car +spotlight +stage +steam locomotive +steel arch bridge +steel drum +stethoscope +stole +stone wall +stopwatch +stove +strainer +streetcar +stretcher +studio couch +stupa +submarine +suit +sundial +sunglass +sunglasses +sunscreen +suspension bridge +swab +sweatshirt +swimming trunks +swing +switch +syringe +table lamp +tank +tape player +teapot +teddy +television +tennis ball +thatch +theater curtain +thimble +thresher +throne +tile roof +toaster +tobacco shop +toilet seat +torch +totem pole +tow truck +toyshop +tractor +trailer truck +tray +trench coat +tricycle +trimaran +tripod +triumphal arch +trolleybus +trombone +tub +turnstile +typewriter keyboard +umbrella +unicycle +upright +vacuum +vase +vault +velvet +vending machine +vestment +viaduct +violin +volleyball +waffle iron +wall clock +wallet +wardrobe +warplane +washbasin +washer +water bottle +water jug +water tower +whiskey jug +whistle +wig +window screen +window shade +Windsor tie +wine bottle +wing +wok +wooden spoon +wool +worm fence +wreck +yawl +yurt +web site +comic book +crossword puzzle +street sign +traffic light +book jacket +menu +plate +guacamole +consomme +hot pot +trifle +ice cream +ice lolly +French loaf +bagel +pretzel +cheeseburger +hotdog +mashed potato +head cabbage +broccoli +cauliflower +zucchini +spaghetti squash +acorn squash +butternut squash +cucumber +artichoke +bell pepper +cardoon +mushroom +Granny Smith +strawberry +orange +lemon +fig +pineapple +banana +jackfruit +custard apple +pomegranate +hay +carbonara +chocolate 
sauce +dough +meat loaf +pizza +potpie +burrito +red wine +espresso +cup +eggnog +alp +bubble +cliff +coral reef +geyser +lakeside +promontory +sandbar +seashore +valley +volcano +ballplayer +groom +scuba diver +rapeseed +daisy +yellow lady's slipper +corn +acorn +hip +buckeye +coral fungus +agaric +gyromitra +stinkhorn +earthstar +hen-of-the-woods +bolete +ear +toilet tissue diff --git a/MLOps/serving_patterns/edge_ai_pattern/app/src/main/assets/mobilenet_quantized_scripted_925.pt b/MLOps/serving_patterns/edge_ai_pattern/app/src/main/assets/mobilenet_quantized_scripted_925.pt new file mode 100644 index 0000000..96deea2 Binary files /dev/null and b/MLOps/serving_patterns/edge_ai_pattern/app/src/main/assets/mobilenet_quantized_scripted_925.pt differ diff --git a/MLOps/serving_patterns/edge_ai_pattern/app/src/main/assets/mobilenet_v2_1.0_224.tflite b/MLOps/serving_patterns/edge_ai_pattern/app/src/main/assets/mobilenet_v2_1.0_224.tflite new file mode 100644 index 0000000..56268e1 Binary files /dev/null and b/MLOps/serving_patterns/edge_ai_pattern/app/src/main/assets/mobilenet_v2_1.0_224.tflite differ diff --git a/MLOps/serving_patterns/edge_ai_pattern/app/src/main/assets/resnet18.pt b/MLOps/serving_patterns/edge_ai_pattern/app/src/main/assets/resnet18.pt new file mode 100644 index 0000000..1cea5f5 Binary files /dev/null and b/MLOps/serving_patterns/edge_ai_pattern/app/src/main/assets/resnet18.pt differ diff --git a/MLOps/serving_patterns/edge_ai_pattern/app/src/main/java/com/shibuiwilliam/tflitepytorch/AbstractCameraXActivity.kt b/MLOps/serving_patterns/edge_ai_pattern/app/src/main/java/com/shibuiwilliam/tflitepytorch/AbstractCameraXActivity.kt new file mode 100644 index 0000000..e621249 --- /dev/null +++ b/MLOps/serving_patterns/edge_ai_pattern/app/src/main/java/com/shibuiwilliam/tflitepytorch/AbstractCameraXActivity.kt @@ -0,0 +1,192 @@ +package com.shibuiwilliam.tflitepytorch + +import android.Manifest +import android.content.pm.PackageManager +import 
android.graphics.Matrix +import android.os.Bundle +import android.os.Handler +import android.os.HandlerThread +import android.util.Log +import android.util.Rational +import android.util.Size +import android.view.Surface +import android.view.TextureView +import android.view.ViewGroup +import android.widget.TextView +import androidx.annotation.Nullable +import androidx.annotation.UiThread +import androidx.annotation.WorkerThread +import androidx.appcompat.app.AppCompatActivity +import androidx.camera.core.* +import androidx.core.app.ActivityCompat +import androidx.core.content.ContextCompat + +abstract class AbstractCameraXActivity : AppCompatActivity(){ + private val TAG: String = AbstractCameraXActivity::class.java.simpleName + + protected var app: App? = null + + private val REQUEST_CODE_PERMISSIONS = 101 + private val REQUIRED_PERMISSIONS = arrayOf( + Manifest.permission.CAMERA + ) + protected var mBackgroundThread: HandlerThread? = null + protected var mBackgroundHandler: Handler? = null + + protected lateinit var textureView: TextureView + internal lateinit var textView: TextView + + private var mLastAnalysisResultTime: Long = System.currentTimeMillis() + + protected abstract fun getContentView(): Int + protected abstract fun getCameraTextureView(): TextureView + protected abstract fun getInferenceTextView(): TextView + + @WorkerThread + @Nullable + protected abstract fun analyzeImage(image: ImageProxy, rotationDegrees: Int): String? + + @UiThread + protected abstract fun showResult(result: String) + + override fun onCreate(savedInstanceState: Bundle?) 
{ + super.onCreate(savedInstanceState) + setContentView(getContentView()) + + if (app==null) app = application as App + + textureView = getCameraTextureView() + textView = getInferenceTextView() + + if (allPermissionsGranted()) { + startBackgroundThread() + textureView.post { setupCameraX() } + textureView.addOnLayoutChangeListener { _, _, _, _, _, _, _, _, _ -> + updateTransform() + } + } else { + ActivityCompat.requestPermissions( + this, + REQUIRED_PERMISSIONS, + REQUEST_CODE_PERMISSIONS + ) + } + } + + private fun setupCameraX() { + CameraX.unbindAll() + + val screenSize = Size(textureView.width, textureView.height) + val screenAspectRatio = Rational(1, 1) + + val previewConfig = PreviewConfig + .Builder() + .apply { + setLensFacing(CameraX.LensFacing.BACK) + setTargetResolution(screenSize) + setTargetAspectRatio(screenAspectRatio) + setTargetRotation(windowManager.defaultDisplay.rotation) + } + .build() + val preview = Preview(previewConfig) + + preview.setOnPreviewOutputUpdateListener { + val parent = textureView.parent as ViewGroup + parent.removeView(textureView) + textureView.surfaceTexture = it.surfaceTexture + parent.addView(textureView, 0) + } + + val imageAnalysisConfig = ImageAnalysisConfig + .Builder() + .apply { + setCallbackHandler(mBackgroundHandler!!) 
+ setImageReaderMode(ImageAnalysis.ImageReaderMode.ACQUIRE_LATEST_IMAGE) + } + .build() + + val imageAnalysis = ImageAnalysis(imageAnalysisConfig) + imageAnalysis.analyzer = + ImageAnalysis.Analyzer { image: ImageProxy?, rotationDegrees: Int -> + if (System.currentTimeMillis() - mLastAnalysisResultTime < 500) return@Analyzer + if (image == null) return@Analyzer + val result = analyzeImage(image, rotationDegrees) + + if (result != null){ + runOnUiThread(Runnable { showResult(result) }) + } + mLastAnalysisResultTime = System.currentTimeMillis() + } + + CameraX.bindToLifecycle(this, preview, imageAnalysis) + } + + private fun updateTransform() { + val matrix = Matrix() + val centerX = textureView.width / 2f + val centerY = textureView.height / 2f + + val rotationDegrees = when (textureView.display.rotation) { + Surface.ROTATION_0 -> 0 + Surface.ROTATION_90 -> 90 + Surface.ROTATION_180 -> 180 + Surface.ROTATION_270 -> 270 + else -> return + } + matrix.postRotate(-rotationDegrees.toFloat(), centerX, centerY) + textureView.setTransform(matrix) + } + + protected fun startBackgroundThread() { + mBackgroundThread = HandlerThread("BackgroundThread") + mBackgroundThread!!.start() + mBackgroundHandler = Handler(mBackgroundThread!!.looper) + } + + protected fun stopBackgroundThread() { + if (mBackgroundHandler != null) { + mBackgroundThread!!.quitSafely() + mBackgroundThread!!.join() + } + try { + mBackgroundThread = null + mBackgroundHandler = null + } catch (e: InterruptedException) { + Log.e(TAG, "Error on stopping background thread", e) + } + } + + override fun onRequestPermissionsResult( + requestCode: Int, + permissions: Array, + grantResults: IntArray + ) { + if (requestCode == REQUEST_CODE_PERMISSIONS) { + if (!allPermissionsGranted()) { + finish() + } + } + } + + protected fun allPermissionsGranted() = REQUIRED_PERMISSIONS.all { + for (permission in REQUIRED_PERMISSIONS) { + if (ContextCompat.checkSelfPermission(this, permission) + != PackageManager.PERMISSION_GRANTED 
+ ) { + return false + } + } + Log.i(TAG, "Permitted to use camera and internet") + return true + } + + override fun onStop() { + stopBackgroundThread() + super.onStop() + } + + override fun onDestroy() { + stopBackgroundThread() + super.onDestroy() + } +} \ No newline at end of file diff --git a/MLOps/serving_patterns/edge_ai_pattern/app/src/main/java/com/shibuiwilliam/tflitepytorch/App.kt b/MLOps/serving_patterns/edge_ai_pattern/app/src/main/java/com/shibuiwilliam/tflitepytorch/App.kt new file mode 100644 index 0000000..4384376 --- /dev/null +++ b/MLOps/serving_patterns/edge_ai_pattern/app/src/main/java/com/shibuiwilliam/tflitepytorch/App.kt @@ -0,0 +1,23 @@ +package com.shibuiwilliam.tflitepytorch + +import android.app.Activity +import android.app.Application +import android.content.Context + + +class App: Application(){ + private val TAG: String = App::class.java.simpleName + + internal lateinit var labels: List + + override fun onCreate() { + super.onCreate() + } + + internal fun initialize(context: Context){ + labels = Utils.loadLabelList(context, Constants.LABEL_PATH) + } + + internal fun close() { + } +} \ No newline at end of file diff --git a/MLOps/serving_patterns/edge_ai_pattern/app/src/main/java/com/shibuiwilliam/tflitepytorch/Constants.kt b/MLOps/serving_patterns/edge_ai_pattern/app/src/main/java/com/shibuiwilliam/tflitepytorch/Constants.kt new file mode 100644 index 0000000..5c8ea05 --- /dev/null +++ b/MLOps/serving_patterns/edge_ai_pattern/app/src/main/java/com/shibuiwilliam/tflitepytorch/Constants.kt @@ -0,0 +1,28 @@ +package com.shibuiwilliam.tflitepytorch + +import android.graphics.ImageFormat + +object Constants{ + const val LABEL_PATH = "imagenet_labels.txt" + const val TFLITE_MOBILENET_V2_PATH = "mobilenet_v2_1.0_224.tflite" + const val PYTORCH_MOBILENET_QUANTIZED_PATH = "mobilenet_quantized_scripted_925.pt" + const val PYTORCH_RESNET18_PATH = "resnet18.pt" + + const val INPUT_IMAGE_SIZE = 224 + const val IMAGE_FORMAT_NV21 = ImageFormat.NV21 + 
+ const val IMAGE_MEAN = 127.5f + const val IMAGE_STD = 127.5f + const val PROBABILITY_MEAN = 0.0f + const val PROBABILITY_STD = 1.0f + + const val TOPK = 3 + + const val NUM_THREAD = 4 + + enum class Device{ + CPU, + NNAPI, + GPU + } +} \ No newline at end of file diff --git a/MLOps/serving_patterns/edge_ai_pattern/app/src/main/java/com/shibuiwilliam/tflitepytorch/MainActivity.kt b/MLOps/serving_patterns/edge_ai_pattern/app/src/main/java/com/shibuiwilliam/tflitepytorch/MainActivity.kt new file mode 100644 index 0000000..28f119d --- /dev/null +++ b/MLOps/serving_patterns/edge_ai_pattern/app/src/main/java/com/shibuiwilliam/tflitepytorch/MainActivity.kt @@ -0,0 +1,76 @@ +package com.shibuiwilliam.tflitepytorch + +import android.Manifest +import android.content.Intent +import android.content.pm.PackageManager +import android.os.Bundle +import android.util.Log +import android.view.View +import android.widget.Button +import androidx.appcompat.app.AppCompatActivity +import androidx.core.content.ContextCompat + +class MainActivity : AppCompatActivity() { + private val TAG: String = MainActivity::class.java.simpleName + private val REQUEST_CODE_PERMISSIONS = 101 + private val REQUIRED_PERMISSIONS = arrayOf( + Manifest.permission.CAMERA + ) + + private lateinit var tfliteButton: Button + private lateinit var pytorchButton: Button + + private var app: App? = null + + override fun onCreate(savedInstanceState: Bundle?) { + super.onCreate(savedInstanceState) + setContentView(R.layout.activity_main) + + if (app==null){ + app = application as App + app!!.initialize(this) + } + + tfliteButton = findViewById(R.id.TFLite) + tfliteButton.setOnClickListener(object : View.OnClickListener { + override fun onClick(v: View?) { + startActivity(Intent(application, TFLiteActivity::class.java)) + } + }) + + pytorchButton = findViewById(R.id.PyTorch) + pytorchButton.setOnClickListener(object : View.OnClickListener { + override fun onClick(v: View?) 
{ + startActivity(Intent(application, PyTorchActivity::class.java)) + } + }) + + } + + override fun onRequestPermissionsResult( + requestCode: Int, + permissions: Array, + grantResults: IntArray) { + if (requestCode == REQUEST_CODE_PERMISSIONS) { + if (!allPermissionsGranted()) { + finish() + } + } + } + + private fun allPermissionsGranted() = REQUIRED_PERMISSIONS.all { + for (permission in REQUIRED_PERMISSIONS) { + if (ContextCompat.checkSelfPermission(this, permission) + != PackageManager.PERMISSION_GRANTED) { + return false + } + } + Log.i(TAG, "Permitted to use camera and internet") + return true + } + + override fun onDestroy() { + super.onDestroy() + if (app != null) app!!.close() + } +} diff --git a/MLOps/serving_patterns/edge_ai_pattern/app/src/main/java/com/shibuiwilliam/tflitepytorch/PyTorchActivity.kt b/MLOps/serving_patterns/edge_ai_pattern/app/src/main/java/com/shibuiwilliam/tflitepytorch/PyTorchActivity.kt new file mode 100644 index 0000000..0e79d2b --- /dev/null +++ b/MLOps/serving_patterns/edge_ai_pattern/app/src/main/java/com/shibuiwilliam/tflitepytorch/PyTorchActivity.kt @@ -0,0 +1,106 @@ +package com.shibuiwilliam.tflitepytorch + +import android.os.Bundle +import android.util.Log +import android.view.TextureView +import android.widget.TextView +import androidx.annotation.Nullable +import androidx.annotation.UiThread +import androidx.annotation.WorkerThread +import androidx.camera.core.ImageProxy +import org.pytorch.IValue +import org.pytorch.Module +import org.pytorch.Tensor +import org.pytorch.torchvision.TensorImageUtils +import java.nio.FloatBuffer + +class PyTorchActivity : AbstractCameraXActivity() { + private val TAG: String = PyTorchActivity::class.java.simpleName + + private lateinit var pytorchModule: Module + + private lateinit var mInputTensorBuffer: FloatBuffer + private lateinit var mInputTensor: Tensor + + override fun getContentView(): Int = R.layout.activity_py_torch + override fun getCameraTextureView(): TextureView = 
findViewById(R.id.cameraPreviewTextureView) + override fun getInferenceTextView(): TextView = findViewById(R.id.inferenceText) + + override fun onCreate(savedInstanceState: Bundle?) { + super.onCreate(savedInstanceState) + + initializePyTorch() + mInputTensorBuffer = + Tensor.allocateFloatBuffer(3 * Constants.INPUT_IMAGE_SIZE * Constants.INPUT_IMAGE_SIZE) + + mInputTensor = Tensor.fromBlob( + mInputTensorBuffer, + longArrayOf( + 1, + 3, + Constants.INPUT_IMAGE_SIZE.toLong(), + Constants.INPUT_IMAGE_SIZE.toLong() + ) + ) + } + + private fun initializePyTorch() { + pytorchModule = Module.load(Utils.assetFilePath(this, Constants.PYTORCH_RESNET18_PATH)) + } + + @WorkerThread + @Nullable + override fun analyzeImage(image: ImageProxy, rotationDegrees: Int): String? { + try { + TensorImageUtils.imageYUV420CenterCropToFloatBuffer( + image.image, + rotationDegrees, + Constants.INPUT_IMAGE_SIZE, + Constants.INPUT_IMAGE_SIZE, + TensorImageUtils.TORCHVISION_NORM_MEAN_RGB, + TensorImageUtils.TORCHVISION_NORM_STD_RGB, + mInputTensorBuffer, + 0 + ) + val labeledProbability = classifyImage() + Log.i(TAG, "top${Constants.TOPK} prediction: ${labeledProbability}") + return labeledProbability.map { it -> + val p = "%,.2f".format(it.value) + "${it.key}: ${p} \n" + }.joinToString() + } catch (e: Exception) { + e.printStackTrace() + return null + } + } + + @UiThread + override fun showResult(result: String) { + textView.text = result + } + + private fun classifyImage(): Map { + val outputModule = pytorchModule.forward(IValue.from(mInputTensor)).toTensor() + val scores = outputModule.dataAsFloatArray + val labeledProbability = mapScoreToLabelMap(scores) + Log.i(TAG, "full prediction: ${labeledProbability}") + return Utils.prioritizeByProbability(labeledProbability) + } + + private fun mapScoreToLabelMap(score: FloatArray): Map { + val labeledProbability: MutableMap = mutableMapOf() + for (i in 0 until app!!.labels.size - 1) { + labeledProbability[app!!.labels[i + 1]] = score[i] + } + 
return labeledProbability + } + + override fun onStop() { + super.onStop() + } + + override fun onDestroy() { + super.onDestroy() + } + +} diff --git a/MLOps/serving_patterns/edge_ai_pattern/app/src/main/java/com/shibuiwilliam/tflitepytorch/TFLiteActivity.kt b/MLOps/serving_patterns/edge_ai_pattern/app/src/main/java/com/shibuiwilliam/tflitepytorch/TFLiteActivity.kt new file mode 100644 index 0000000..429c6c8 --- /dev/null +++ b/MLOps/serving_patterns/edge_ai_pattern/app/src/main/java/com/shibuiwilliam/tflitepytorch/TFLiteActivity.kt @@ -0,0 +1,183 @@ +package com.shibuiwilliam.tflitepytorch + +import android.graphics.Bitmap +import android.graphics.Matrix +import android.os.Bundle +import android.util.Log +import android.view.TextureView +import android.widget.TextView +import androidx.annotation.Nullable +import androidx.annotation.UiThread +import androidx.annotation.WorkerThread +import androidx.camera.core.ImageProxy +import org.tensorflow.lite.Interpreter +import org.tensorflow.lite.gpu.GpuDelegate +import org.tensorflow.lite.nnapi.NnApiDelegate +import org.tensorflow.lite.support.common.FileUtil +import org.tensorflow.lite.support.common.TensorOperator +import org.tensorflow.lite.support.common.TensorProcessor +import org.tensorflow.lite.support.common.ops.NormalizeOp +import org.tensorflow.lite.support.image.ImageProcessor +import org.tensorflow.lite.support.image.TensorImage +import org.tensorflow.lite.support.image.ops.ResizeOp +import org.tensorflow.lite.support.image.ops.ResizeOp.ResizeMethod +import org.tensorflow.lite.support.image.ops.ResizeWithCropOrPadOp +import org.tensorflow.lite.support.label.TensorLabel +import org.tensorflow.lite.support.tensorbuffer.TensorBuffer +import java.nio.MappedByteBuffer + + +class TFLiteActivity : AbstractCameraXActivity() { + private val TAG: String = TFLiteActivity::class.java.simpleName + + private lateinit var tfliteModel: MappedByteBuffer + private lateinit var tfliteInterpreter: Interpreter + private val 
tfliteOptions = Interpreter.Options() + private var gpuDelegate: GpuDelegate? = null + private var nnApiDelegate: NnApiDelegate? = null + + private lateinit var inputImageBuffer: TensorImage + private lateinit var outputProbabilityBuffer: TensorBuffer + private lateinit var probabilityProcessor: TensorProcessor + + override fun getContentView(): Int = R.layout.activity_t_f_lite + override fun getCameraTextureView(): TextureView = findViewById(R.id.cameraPreviewTextureView) + override fun getInferenceTextView(): TextView = findViewById(R.id.inferenceText) + + override fun onCreate(savedInstanceState: Bundle?) { + super.onCreate(savedInstanceState) + + initializeTFLite(Constants.Device.NNAPI, Constants.NUM_THREAD) + + inputImageBuffer = TensorImage(tfliteInterpreter.getInputTensor(0).dataType()) + outputProbabilityBuffer = TensorBuffer.createFixedSize( + tfliteInterpreter.getOutputTensor(0).shape(), + tfliteInterpreter.getInputTensor(0).dataType() + ) + probabilityProcessor = TensorProcessor + .Builder() + .add(postprocessNormalizeOp()) + .build() + } + + private fun initializeTFLite(device: Constants.Device, numThreads: Int) { + when (device) { + Constants.Device.NNAPI -> { + nnApiDelegate = NnApiDelegate() + tfliteOptions.addDelegate(nnApiDelegate) + } + Constants.Device.GPU -> { + gpuDelegate = GpuDelegate() + tfliteOptions.addDelegate(gpuDelegate) + } + Constants.Device.CPU -> { + } + } + tfliteOptions.setNumThreads(numThreads) + tfliteModel = FileUtil.loadMappedFile(this, Constants.TFLITE_MOBILENET_V2_PATH) + tfliteInterpreter = Interpreter(tfliteModel, tfliteOptions) + } + + @WorkerThread + @Nullable + override fun analyzeImage(image: ImageProxy, rotationDegrees: Int): String? 
{ + try { + var bitmap = Utils.imageToBitmap(image) + bitmap = rotateBitmap(bitmap, 90f) + val labeledProbability = classifyImage(bitmap) + Log.i(TAG, "top${Constants.TOPK} prediction: ${labeledProbability}") + return labeledProbability.map{it -> + val p = "%,.2f".format(it.value) + "${it.key}: ${p} \n" + }.joinToString() + } + catch (e: Exception){ + e.printStackTrace() + return null + } + } + + @UiThread + override fun showResult(result: String) { + textView.text = result + } + + private fun classifyImage(bitmap: Bitmap): Map { + val inputImageBuffer = loadImage(bitmap) + tfliteInterpreter.run( + inputImageBuffer!!.buffer, + outputProbabilityBuffer.buffer.rewind() + ) + val labeledProbability: Map = TensorLabel( + app!!.labels, + probabilityProcessor.process(outputProbabilityBuffer) + ).mapWithFloatValue + Log.i(TAG, "full prediction: ${labeledProbability}") + return Utils.prioritizeByProbability(labeledProbability) + } + + private fun loadImage(bitmap: Bitmap): TensorImage? { + inputImageBuffer.load(bitmap) + + val cropSize = Math.min(bitmap.width, bitmap.height) + + val imageProcessor = ImageProcessor + .Builder() + .add(ResizeWithCropOrPadOp(cropSize, cropSize)) + .add( + ResizeOp( + Constants.INPUT_IMAGE_SIZE, + Constants.INPUT_IMAGE_SIZE, + ResizeMethod.NEAREST_NEIGHBOR + ) + ) + .add(preprocessNormalizeOp()) + .build() + return imageProcessor.process(inputImageBuffer) + } + + private fun rotateBitmap(bitmap: Bitmap, degrees: Float): Bitmap { + val matrix = Matrix() + matrix.postRotate(degrees) + return Bitmap.createBitmap(bitmap, 0, 0, bitmap.width, bitmap.height, matrix, true) + } + + private fun preprocessNormalizeOp(): TensorOperator? { + return NormalizeOp(Constants.IMAGE_MEAN, Constants.IMAGE_STD) + } + + protected fun postprocessNormalizeOp(): TensorOperator? 
{ + return NormalizeOp(Constants.PROBABILITY_MEAN, Constants.PROBABILITY_STD) + } + + override fun onStop() { + super.onStop() + if (::tfliteInterpreter.isInitialized) { + tfliteInterpreter.close() + } + if (gpuDelegate != null) { + gpuDelegate!!.close() + gpuDelegate = null + } + if (nnApiDelegate != null) { + nnApiDelegate!!.close() + nnApiDelegate = null + } + } + + override fun onDestroy() { + super.onDestroy() + if (::tfliteInterpreter.isInitialized) { + tfliteInterpreter.close() + } + if (gpuDelegate != null) { + gpuDelegate!!.close() + gpuDelegate = null + } + if (nnApiDelegate != null) { + nnApiDelegate!!.close() + nnApiDelegate = null + } + + } +} diff --git a/MLOps/serving_patterns/edge_ai_pattern/app/src/main/java/com/shibuiwilliam/tflitepytorch/Utils.kt b/MLOps/serving_patterns/edge_ai_pattern/app/src/main/java/com/shibuiwilliam/tflitepytorch/Utils.kt new file mode 100644 index 0000000..f562428 --- /dev/null +++ b/MLOps/serving_patterns/edge_ai_pattern/app/src/main/java/com/shibuiwilliam/tflitepytorch/Utils.kt @@ -0,0 +1,106 @@ +package com.shibuiwilliam.tflitepytorch + +import android.content.Context +import android.graphics.Bitmap +import android.graphics.BitmapFactory +import android.graphics.Rect +import android.graphics.YuvImage +import android.util.Log +import androidx.camera.core.ImageProxy +import java.io.* +import java.util.* +import kotlin.Comparator + +object Utils{ + private val TAG: String = Utils::class.java.simpleName + + fun loadLabelList(context: Context, labelPath: String = Constants.LABEL_PATH): List { + Log.v(TAG, "Loading ${labelPath}") + val labelList: MutableList = mutableListOf() + try { + BufferedReader(InputStreamReader(context.assets.open(labelPath))).use { reader -> + var line = reader.readLine() + while (line != null) { + labelList.add(line) + line = reader.readLine() + } + } + } + catch (e: IOException) { + Log.e(TAG, "Failed to read label list.", e) + } + + return labelList + } + + fun assetFilePath(context: Context, 
assetName: String): String? { + val file = File(context.filesDir, assetName) + if (file.exists() && file.length() > 0) { + return file.absolutePath + } + try { + context.assets.open(assetName).use { `is` -> + FileOutputStream(file).use { os -> + val buffer = ByteArray(4 * 1024) + var read: Int + while (`is`.read(buffer).also { read = it } != -1) { + os.write(buffer, 0, read) + } + os.flush() + } + return file.absolutePath + } + } catch (e: IOException) { + Log.e(TAG, "Error process asset $assetName to file path") + } + return null + } + + fun imageToBitmap(image: ImageProxy): Bitmap { + val yBuffer = image.planes[0].buffer + val uBuffer = image.planes[1].buffer + val vBuffer = image.planes[2].buffer + + val ySize = yBuffer.remaining() + val uSize = uBuffer.remaining() + val vSize = vBuffer.remaining() + + val nv21 = ByteArray(ySize + uSize + vSize) + + yBuffer.get(nv21, 0, ySize) + vBuffer.get(nv21, ySize, vSize) + uBuffer.get(nv21, ySize + vSize, uSize) + + val yuvImage = YuvImage(nv21, Constants.IMAGE_FORMAT_NV21, image.width, image.height, null) + val out = ByteArrayOutputStream() + yuvImage.compressToJpeg(Rect(0, 0, yuvImage.width, yuvImage.height), 100, out) + val imageBytes = out.toByteArray() + return BitmapFactory.decodeByteArray(imageBytes, 0, imageBytes.size) + } + + fun prioritizeByProbability(labeledProbability: Map): MutableMap { + val priorityMap: MutableMap = mutableMapOf() + val priorityQueue = PriorityQueue( + Constants.TOPK, + Comparator {a,b -> + val aProb = labeledProbability.getOrDefault(a, 0f) + val bProb = labeledProbability.getOrDefault(b, 0f) + when { + aProb > bProb -> 1 + aProb < bProb -> -1 + else -> 0 + } + } + ) + for (k in labeledProbability.keys){ + priorityQueue.add(k) + if (priorityQueue.size > Constants.TOPK) priorityQueue.remove() + } + for (i in 0 until Constants.TOPK){ + val p = priorityQueue.poll() + priorityMap[p] = labeledProbability.getOrDefault(p, 0f) + } + return priorityMap + } + +} \ No newline at end of file diff --git 
a/MLOps/serving_patterns/edge_ai_pattern/app/src/main/res/drawable-v24/ic_launcher_foreground.xml b/MLOps/serving_patterns/edge_ai_pattern/app/src/main/res/drawable-v24/ic_launcher_foreground.xml new file mode 100644 index 0000000..2b068d1 --- /dev/null +++ b/MLOps/serving_patterns/edge_ai_pattern/app/src/main/res/drawable-v24/ic_launcher_foreground.xml @@ -0,0 +1,30 @@ + + + + + + + + + + + \ No newline at end of file diff --git a/MLOps/serving_patterns/edge_ai_pattern/app/src/main/res/drawable/ic_launcher_background.xml b/MLOps/serving_patterns/edge_ai_pattern/app/src/main/res/drawable/ic_launcher_background.xml new file mode 100644 index 0000000..07d5da9 --- /dev/null +++ b/MLOps/serving_patterns/edge_ai_pattern/app/src/main/res/drawable/ic_launcher_background.xml @@ -0,0 +1,170 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/MLOps/serving_patterns/edge_ai_pattern/app/src/main/res/layout/activity_main.xml b/MLOps/serving_patterns/edge_ai_pattern/app/src/main/res/layout/activity_main.xml new file mode 100644 index 0000000..8a3e3d6 --- /dev/null +++ b/MLOps/serving_patterns/edge_ai_pattern/app/src/main/res/layout/activity_main.xml @@ -0,0 +1,43 @@ + + + + + +