.github/workflows/build-test.yml
@@ -19,7 +19,7 @@ jobs:
     runs-on: ubuntu-latest
     strategy:
       matrix:
-        python-version: [3.6, 3.7, 3.8, 3.9]
+        python-version: [3.7, 3.8, 3.9]
 
     steps:
       - uses: actions/checkout@v3

@@ -1,5 +1,5 @@
 #!/usr/bin/env python3
-from openapi_parser.parser.loader import OpenApiParser
+from prance import BaseParser
 
 import json
 import os
@@ -12,17 +12,13 @@ def main():
     cwd = os.getcwd()
     path = os.path.join(cwd, 'spec.json')
     print("opening spec file: ", path)
-    parser = OpenApiParser.open(path)
-    # Ignore the security definitions.
-    parser.load_metadata()
-    parser.load_schemas()
-    parser.load_path_items()
+    parser = BaseParser(path)
 
     # Generate the types.
-    generateTypes(cwd, parser)
+    generateTypes(cwd, parser.specification)
 
     # Generate the paths.
-    data = generatePaths(cwd, parser)
+    data = generatePaths(cwd, parser.specification)
 
     # Add the client information to the generation.
     data['info']['x-python'] = {
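
The switch to prance changes what the rest of the script receives: BaseParser parses and validates the document up front and exposes it as a plain dict via its .specification attribute, which is why generateTypes() and generatePaths() now take a dict instead of a parser object. A minimal sketch of the new entry point, assuming a spec.json in the working directory:

```python
from prance import BaseParser

parser = BaseParser("spec.json")  # parses and validates the OpenAPI document
spec = parser.specification       # plain dict: 'info', 'paths', 'components', ...
print(spec["info"]["title"])
```
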
@@ -47,7 +43,7 @@ client = ClientFromEnv()""",
     f.close()
 
 
-def generatePaths(cwd: str, parser: OpenApiParser) -> dict:
+def generatePaths(cwd: str, parser: dict) -> dict:
     # Make sure we have the directory.
     path = os.path.join(cwd, 'kittycad', 'api')
     os.makedirs(path, exist_ok=True)
@@ -61,7 +57,7 @@ def generatePaths(cwd: str, parser: OpenApiParser) -> dict:
         f.close()
 
     # Generate the directory/__init__.py for each of the tags.
-    tags = parser.data['tags']
+    tags = parser['tags']
     for tag in tags:
         tag_name = tag['name']
         tag_description = tag['description']
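
For orientation, the 'tags' entries this loop consumes have this shape (a hypothetical two-tag fragment; the real descriptions appear in the regenerated __init__.py docstrings further down):

```python
spec_fragment = {
    "tags": [
        {"name": "file", "description": "CAD file operations."},
        {"name": "meta", "description": "Meta information about the API."},
    ]
}
for tag in spec_fragment["tags"]:
    print(tag["name"], "-", tag["description"])
```
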
@@ -82,7 +78,7 @@ def generatePaths(cwd: str, parser: OpenApiParser) -> dict:
         f.close()
 
     # Generate the paths.
-    data = parser.data
+    data = parser
     paths = data['paths']
     for p in paths:
         for method in paths[p]:
@@ -313,7 +309,11 @@ response: Response[""" + success_type + """] = await """ + fn_name + """.asyncio
                 else:
                     raise Exception("Unknown type")
             else:
                 raise Exception("Unknown type")
+            f.write(
+                "\t\tresponse_" +
+                response_code +
+                " = response.json()\n")
 
         elif '$ref' in response:
             schema_name = response['$ref'].replace(
                 '#/components/responses/', '')
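
The added write emits a fallback parse line into each generated endpoint module. A small illustration of the exact string it produces, assuming response_code is "200":

```python
response_code = "200"
line = "\t\tresponse_" + response_code + " = response.json()\n"
# In the generated module this renders as:
#         response_200 = response.json()
assert line == "\t\tresponse_200 = response.json()\n"
```
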
@@ -628,7 +628,7 @@ response: Response[""" + success_type + """] = await """ + fn_name + """.asyncio
     return data
 
 
-def generateTypes(cwd: str, parser: OpenApiParser):
+def generateTypes(cwd: str, parser: dict):
     # Make sure we have the directory.
     path = os.path.join(cwd, 'kittycad', 'models')
     os.makedirs(path, exist_ok=True)
@@ -641,7 +641,7 @@ def generateTypes(cwd: str, parser: OpenApiParser):
     f.write("\n")
 
     # Generate the types.
-    data = parser.data
+    data = parser
     schemas = data['components']['schemas']
     for key in schemas:
         schema = schemas[key]
@@ -673,6 +673,7 @@ def generateType(path: str, name: str, schema: dict):
 
     refs = getRefs(schema)
     for ref in refs:
+        print(" ref: ", ref, "schema: ", [schema])
         f.write(
             "from ..models." +
             camel_to_snake(ref) +
@@ -697,7 +698,7 @@ def generateType(path: str, name: str, schema: dict):
             # Write the property.
             if property_type == 'string':
                 if 'format' in property_schema:
-                    if property_schema['format'] == 'date-time':
+                    if property_schema['format'] == 'date-time' or property_schema['format'] == 'partial-date-time':
                         f.write(
                             "\t" +
                             property_name +
@@ -734,6 +735,19 @@ def generateType(path: str, name: str, schema: dict):
                             ": Union[Unset, " +
                             ref +
                             "] = UNSET\n")
+            elif 'allOf' in property_schema:
+                thing = property_schema['allOf'][0]
+                if '$ref' in thing:
+                    ref = thing['$ref'].replace(
+                        '#/components/schemas/', '')
+                    f.write(
+                        "\t" +
+                        property_name +
+                        ": Union[Unset, " +
+                        ref +
+                        "] = UNSET\n")
+                else:
+                    raise Exception(" unknown allOf type: ", property_schema)
             else:
                 raise Exception(" unknown schema: ", property_schema)
 
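
All three new allOf branches in this file handle the same shape: a property whose schema wraps a single $ref in allOf. A minimal sketch of the unwrapping, using a hypothetical Uuid reference:

```python
property_schema = {"allOf": [{"$ref": "#/components/schemas/Uuid"}]}

thing = property_schema["allOf"][0]  # only the first element is inspected
ref = thing["$ref"].replace("#/components/schemas/", "")
assert ref == "Uuid"
```

Note that only allOf[0] is examined: an allOf combining several sub-schemas would have its remaining entries ignored, and one whose first entry is not a $ref still hits the raise.
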
@@ -754,7 +768,7 @@ def generateType(path: str, name: str, schema: dict):
             # Write the property.
             if property_type == 'string':
                 if 'format' in property_schema:
-                    if property_schema['format'] == 'date-time':
+                    if property_schema['format'] == 'date-time' or property_schema['format'] == 'partial-date-time':
                         f.write(
                             "\t\t" +
                             property_name +
@@ -815,6 +829,27 @@ def generateType(path: str, name: str, schema: dict):
                             " = self." +
                             property_name +
                             ".value\n")
+            elif 'allOf' in property_schema:
+                thing = property_schema['allOf'][0]
+                if '$ref' in thing:
+                    ref = thing['$ref'].replace(
+                        '#/components/schemas/', '')
+                    f.write(
+                        "\t\t" +
+                        property_name +
+                        ": Union[Unset, str] = UNSET\n")
+                    f.write(
+                        "\t\tif not isinstance(self." +
+                        property_name +
+                        ", Unset):\n")
+                    f.write(
+                        "\t\t\t" +
+                        property_name +
+                        " = self." +
+                        property_name +
+                        ".value\n")
+                else:
+                    raise Exception(" unknown allOf type: ", property_schema)
             else:
                 raise Exception(" unknown schema: ", property_schema)
 
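
These writes emit a serialization stanza into each model's to_dict(). Reconstructed output for a hypothetical field named status (the emitted code assumes the referenced model is enum-like and exposes .value; the same stanza appears verbatim in the FileConversion.to_dict() hunk below):

```python
status: Union[Unset, str] = UNSET
if not isinstance(self.status, Unset):
    status = self.status.value
```
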
@@ -854,7 +889,7 @@ def generateType(path: str, name: str, schema: dict):
             # Write the property.
             if property_type == 'string':
                 if 'format' in property_schema:
-                    if property_schema['format'] == 'date-time':
+                    if property_schema['format'] == 'date-time' or property_schema['format'] == 'partial-date-time':
                         f.write(
                             "\t\t_" +
                             property_name +
@@ -928,6 +963,30 @@ def generateType(path: str, name: str, schema: dict):
                     f.write("\t\t\t" + property_name + " = " +
                             ref + "(_" + property_name + ")\n")
                     f.write("\n")
+            elif 'allOf' in property_schema:
+                thing = property_schema['allOf'][0]
+                if '$ref' in thing:
+                    ref = thing['$ref'].replace(
+                        '#/components/schemas/', '')
+                    f.write(
+                        "\t\t_" +
+                        property_name +
+                        " = d.pop(\"" +
+                        property_name +
+                        "\", UNSET)\n")
+                    f.write("\t\t" + property_name +
+                            ": Union[Unset, " + ref + "]\n")
+                    f.write(
+                        "\t\tif isinstance(_" +
+                        property_name +
+                        ", Unset):\n")
+                    f.write("\t\t\t" + property_name + " = UNSET\n")
+                    f.write("\t\telse:\n")
+                    f.write("\t\t\t" + property_name + " = " +
+                            ref + "(_" + property_name + ")\n")
+                    f.write("\n")
+                else:
+                    raise Exception(" unknown allOf type: ", property_schema)
             else:
                 print(" unknown schema: ", property_schema)
                 raise Exception(" unknown schema: ", property_schema)
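
And the deserialization counterpart: the emitted from_dict() stanza for a hypothetical field id backed by a Uuid model, matching the FileConversion.from_dict() hunk below:

```python
_id = d.pop("id", UNSET)
id: Union[Unset, Uuid]
if isinstance(_id, Unset):
    id = UNSET
else:
    id = Uuid(_id)
```
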
@@ -1004,7 +1063,7 @@ def hasDateTime(schema: dict) -> bool:
         if has_date_time:
             return True
     elif type_name == 'string' and 'format' in schema:
-        if schema['format'] == 'date-time':
+        if schema['format'] == 'date-time' or schema['format'] == 'partial-date-time':
             return True
 
     return False
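
The 'partial-date-time' format (presumably a timestamp without a UTC offset, alongside the usual RFC 3339 'date-time') is handled identically everywhere. That works because the generated models parse timestamps with dateutil's isoparse, which accepts both variants:

```python
from dateutil.parser import isoparse

isoparse("2022-03-01T10:00:00Z")  # 'date-time', with offset
isoparse("2022-03-01T10:00:00")   # naive timestamp, no offset
```
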
@@ -1017,6 +1076,14 @@ def getRefs(schema: dict) -> [str]:
 
     else:
         # Generate the type.
+        if not 'type' in schema:
+            if 'allOf' in schema:
+                for sub_schema in schema['allOf']:
+                    refs.extend(getRefs(sub_schema))
+            else:
+                print(" unsupported type: ", schema)
+                raise Exception(" unsupported type: ", schema)
+        else:
             type_name = schema['type']
             if type_name == 'object':
                 # Iterate over the properties.
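
With this branch, getRefs() recurses through allOf wrappers before assuming a 'type' key exists. A self-contained sketch of that recursion (simplified; the real function also walks object properties and array items):

```python
def get_refs_sketch(schema: dict) -> list:
    """Simplified model of getRefs: collect referenced schema names."""
    refs = []
    if '$ref' in schema:
        refs.append(schema['$ref'].replace('#/components/schemas/', ''))
    elif 'allOf' in schema:
        for sub_schema in schema['allOf']:
            refs.extend(get_refs_sketch(sub_schema))
    return refs

assert get_refs_sketch(
    {'allOf': [{'$ref': '#/components/schemas/Uuid'}]}) == ['Uuid']
```
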
@@ -1055,9 +1122,9 @@ def getEndpointRefs(endpoint: dict, data: dict) -> [str]:
                     else:
                         raise Exception("Unknown array type")
                 else:
-                    raise Exception("Unknown type")
+                    raise Exception("Unknown type ", json['type'])
             else:
-                raise Exception("Unknown type")
+                refs.append('dict')
         elif '$ref' in response:
             schema_name = response['$ref'].replace(
                 '#/components/responses/', '')
@@ -1123,6 +1190,8 @@ def getRequestBodyType(endpoint: dict) -> str:
             return ref
     elif content_type == 'text/plain':
         return 'bytes'
+    elif content_type == 'application/octet-stream':
+        return 'bytes'
     else:
         print(" unsupported content type: ", content_type)
         raise Exception("unsupported content type")
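
Request bodies now map three content types. A hedged summary of the mapping after this change (hypothetical helper, not the generator's actual code):

```python
def request_body_type_sketch(content_type: str, ref: str) -> str:
    if content_type == 'application/json':
        return ref  # the referenced model name
    if content_type in ('text/plain', 'application/octet-stream'):
        return 'bytes'  # raw request body
    raise Exception("unsupported content type")
```
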

kittycad/api/file/__init__.py
@@ -1 +1 @@
-""" Contains methods for accessing the file API paths: CAD file operations. """
+""" Contains methods for accessing the file API paths: CAD file operations. Create, get, and list CAD file conversions. More endpoints will be added here in the future as we build out transforms, etc on CAD models. """

kittycad/api/meta/__init__.py
@@ -1 +1 @@
-""" Contains methods for accessing the meta API paths: Meta information about servers, instances, and sessions. """
+""" Contains methods for accessing the meta API paths: Meta information about the API. """

kittycad/api/meta/ping.py
@@ -3,7 +3,8 @@ from typing import Any, Dict, Optional, Union
 import httpx
 
 from ...client import Client
-from ...models.pong_message import PongMessage
+from ...models.pong import Pong
+from ...models.error import Error
 from ...types import Response
 
 def _get_kwargs(
@@ -23,14 +24,20 @@ def _get_kwargs(
     }
 
 
-def _parse_response(*, response: httpx.Response) -> Optional[Union[Any, PongMessage]]:
+def _parse_response(*, response: httpx.Response) -> Optional[Union[Any, Pong, Error]]:
     if response.status_code == 200:
-        response_200 = PongMessage.from_dict(response.json())
+        response_200 = Pong.from_dict(response.json())
         return response_200
+    if response.status_code == 4XX:
+        response_4XX = Error.from_dict(response.json())
+        return response_4XX
+    if response.status_code == 5XX:
+        response_5XX = Error.from_dict(response.json())
+        return response_5XX
     return None
 
 
-def _build_response(*, response: httpx.Response) -> Response[Union[Any, PongMessage]]:
+def _build_response(*, response: httpx.Response) -> Response[Union[Any, Pong, Error]]:
     return Response(
         status_code=response.status_code,
         content=response.content,
@@ -42,7 +49,7 @@ def _build_response(*, response: httpx.Response) -> Response[Union[Any, PongMess
 def sync_detailed(
     *,
     client: Client,
-) -> Response[Union[Any, PongMessage]]:
+) -> Response[Union[Any, Pong, Error]]:
     kwargs = _get_kwargs(
         client=client,
     )
@@ -58,8 +65,7 @@ def sync_detailed(
 def sync(
     *,
     client: Client,
-) -> Optional[Union[Any, PongMessage]]:
-    """ Simple ping to the server. """
+) -> Optional[Union[Any, Pong, Error]]:
 
     return sync_detailed(
         client=client,
@@ -69,7 +75,7 @@ def sync(
 async def asyncio_detailed(
     *,
     client: Client,
-) -> Response[Union[Any, PongMessage]]:
+) -> Response[Union[Any, Pong, Error]]:
     kwargs = _get_kwargs(
         client=client,
     )
@@ -83,8 +89,7 @@ async def asyncio_detailed(
 async def asyncio(
     *,
     client: Client,
-) -> Optional[Union[Any, PongMessage]]:
-    """ Simple ping to the server. """
+) -> Optional[Union[Any, Pong, Error]]:
 
     return (
         await asyncio_detailed(
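
The regenerated ping endpoint now returns Pong on 200 and Error on the spec's 4XX/5XX range responses (note the range keys are stamped into the comparisons verbatim). A hedged usage sketch, with the module path inferred from the relative imports and ClientFromEnv taken from the generator's own docs snippet above:

```python
from kittycad.api.meta import ping  # path inferred from the diff
from kittycad.client import ClientFromEnv  # import path assumed

client = ClientFromEnv()  # builds a client from the API token in the environment
result = ping.sync(client=client)  # Pong on 200, Error on 4XX/5XX, else None
```
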

kittycad/models/__init__.py
@@ -1,13 +1,19 @@
 """ Contains all the data models used in inputs/outputs """
 
-from .auth_session import AuthSession
-from .error_message import ErrorMessage
+from .api_call_query_group import ApiCallQueryGroup
+from .api_call_query_group_by import ApiCallQueryGroupBy
+from .api_call_with_price import ApiCallWithPrice
+from .api_token import ApiToken
+from .created_at_sort_mode import CreatedAtSortMode
+from .error import Error
+from .extended_user import ExtendedUser
 from .file_conversion import FileConversion
+from .file_conversion_output_format import FileConversionOutputFormat
+from .file_conversion_source_format import FileConversionSourceFormat
 from .file_conversion_status import FileConversionStatus
-from .gpu_device import GPUDevice
-from .instance import Instance
-from .pong_enum import PongEnum
-from .pong_message import PongMessage
-from .server_env import ServerEnv
-from .valid_output_file_format import ValidOutputFileFormat
-from .valid_source_file_format import ValidSourceFileFormat
+from .file_conversion_with_output import FileConversionWithOutput
+from .method import Method
+from .pong import Pong
+from .status_code import StatusCode
+from .user import User
+from .uuid import Uuid
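
The public surface of kittycad.models changes accordingly; a quick smoke test of a few of the new names (assuming the regenerated package is installed):

```python
from kittycad.models import (
    Error,
    FileConversion,
    FileConversionOutputFormat,
    Pong,
    Uuid,
)
```
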

kittycad/models/file_conversion.py
@@ -4,25 +4,29 @@ from typing import Any, Dict, List, Type, TypeVar, Union
 import attr
 from dateutil.parser import isoparse
 
-from ..models.valid_output_file_format import ValidOutputFileFormat
-from ..models.valid_source_file_format import ValidSourceFileFormat
+from ..models.uuid import Uuid
+from ..models.file_conversion_output_format import FileConversionOutputFormat
+from ..models.file_conversion_source_format import FileConversionSourceFormat
 from ..models.file_conversion_status import FileConversionStatus
 from ..types import UNSET, Unset
 
 T = TypeVar("T", bound="FileConversion")
 
 
 @attr.s(auto_attribs=True)
 class FileConversion:
     """ """
     completed_at: Union[Unset, datetime.datetime] = UNSET
     created_at: Union[Unset, datetime.datetime] = UNSET
-    id: Union[Unset, str] = UNSET
-    output: Union[Unset, str] = UNSET
-    output_format: Union[Unset, ValidOutputFileFormat] = UNSET
-    src_format: Union[Unset, ValidSourceFileFormat] = UNSET
+    id: Union[Unset, Uuid] = UNSET
+    output_file_link: Union[Unset, str] = UNSET
+    output_format: Union[Unset, FileConversionOutputFormat] = UNSET
+    src_file_link: Union[Unset, str] = UNSET
+    src_format: Union[Unset, FileConversionSourceFormat] = UNSET
     started_at: Union[Unset, datetime.datetime] = UNSET
     status: Union[Unset, FileConversionStatus] = UNSET
     updated_at: Union[Unset, datetime.datetime] = UNSET
     user_id: Union[Unset, str] = UNSET
     worker: Union[Unset, str] = UNSET
 
     additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)
@@ -33,11 +37,14 @@ class FileConversion:
         created_at: Union[Unset, str] = UNSET
         if not isinstance(self.created_at, Unset):
             created_at = self.created_at.isoformat()
-        id = self.id
-        output = self.output
+        id: Union[Unset, str] = UNSET
+        if not isinstance(self.id, Unset):
+            id = self.id.value
+        output_file_link = self.output_file_link
         output_format: Union[Unset, str] = UNSET
         if not isinstance(self.output_format, Unset):
             output_format = self.output_format.value
+        src_file_link = self.src_file_link
         src_format: Union[Unset, str] = UNSET
         if not isinstance(self.src_format, Unset):
             src_format = self.src_format.value
@@ -47,6 +54,11 @@ class FileConversion:
         status: Union[Unset, str] = UNSET
         if not isinstance(self.status, Unset):
             status = self.status.value
+        updated_at: Union[Unset, str] = UNSET
+        if not isinstance(self.updated_at, Unset):
+            updated_at = self.updated_at.isoformat()
         user_id = self.user_id
         worker = self.worker
 
         field_dict: Dict[str, Any] = {}
         field_dict.update(self.additional_properties)
@@ -57,16 +69,24 @@ class FileConversion:
             field_dict['created_at'] = created_at
         if id is not UNSET:
             field_dict['id'] = id
-        if output is not UNSET:
-            field_dict['output'] = output
+        if output_file_link is not UNSET:
+            field_dict['output_file_link'] = output_file_link
         if output_format is not UNSET:
             field_dict['output_format'] = output_format
+        if src_file_link is not UNSET:
+            field_dict['src_file_link'] = src_file_link
         if src_format is not UNSET:
             field_dict['src_format'] = src_format
         if started_at is not UNSET:
             field_dict['started_at'] = started_at
         if status is not UNSET:
             field_dict['status'] = status
+        if updated_at is not UNSET:
+            field_dict['updated_at'] = updated_at
         if user_id is not UNSET:
             field_dict['user_id'] = user_id
         if worker is not UNSET:
             field_dict['worker'] = worker
 
         return field_dict
@@ -87,23 +107,30 @@ class FileConversion:
         else:
             created_at = isoparse(_created_at)
 
-        id = d.pop("id", UNSET)
+        _id = d.pop("id", UNSET)
+        id: Union[Unset, Uuid]
+        if isinstance(_id, Unset):
+            id = UNSET
+        else:
+            id = Uuid(_id)
 
-        output = d.pop("output", UNSET)
+        output_file_link = d.pop("output_file_link", UNSET)
 
         _output_format = d.pop("output_format", UNSET)
-        output_format: Union[Unset, ValidOutputFileFormat]
+        output_format: Union[Unset, FileConversionOutputFormat]
         if isinstance(_output_format, Unset):
             output_format = UNSET
         else:
-            output_format = ValidOutputFileFormat(_output_format)
+            output_format = FileConversionOutputFormat(_output_format)
 
+        src_file_link = d.pop("src_file_link", UNSET)
+
         _src_format = d.pop("src_format", UNSET)
-        src_format: Union[Unset, ValidSourceFileFormat]
+        src_format: Union[Unset, FileConversionSourceFormat]
         if isinstance(_src_format, Unset):
             src_format = UNSET
         else:
-            src_format = ValidSourceFileFormat(_src_format)
+            src_format = FileConversionSourceFormat(_src_format)
 
         _started_at = d.pop("started_at", UNSET)
         started_at: Union[Unset, datetime.datetime]
@@ -119,15 +146,31 @@ class FileConversion:
         else:
             status = FileConversionStatus(_status)
 
+        _updated_at = d.pop("updated_at", UNSET)
+        updated_at: Union[Unset, datetime.datetime]
+        if isinstance(_updated_at, Unset):
+            updated_at = UNSET
+        else:
+            updated_at = isoparse(_updated_at)
+
         user_id = d.pop("user_id", UNSET)
 
         worker = d.pop("worker", UNSET)
 
 
         file_conversion = cls(
             completed_at= completed_at,
             created_at= created_at,
             id= id,
-            output=output,
+            output_file_link= output_file_link,
             output_format= output_format,
+            src_file_link= src_file_link,
             src_format= src_format,
             started_at= started_at,
             status= status,
+            updated_at= updated_at,
             user_id= user_id,
             worker= worker,
         )
 
         file_conversion.additional_properties = d
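
A hedged round-trip sketch with the regenerated model, assuming the standard from_dict()/to_dict() pair the generator emits (field names from this diff; the id and link values are hypothetical, and 'Queued' matches FileConversionStatus below):

```python
from kittycad.models import FileConversion

fc = FileConversion.from_dict({
    "id": "aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee",  # hypothetical Uuid value
    "status": "Queued",  # FileConversionStatus.QUEUED
    "output_file_link": "https://example.com/output.stl",
    "updated_at": "2022-03-01T10:00:00Z",
})
assert fc.to_dict()["status"] == "Queued"
```
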

kittycad/models/file_conversion_status.py
@@ -1,6 +1,5 @@
 from enum import Enum
 
-
 class FileConversionStatus(str, Enum):
     QUEUED = 'Queued'
     UPLOADED = 'Uploaded'

pyproject.toml
@@ -12,7 +12,7 @@ packages = [
 include = ["CHANGELOG.md", "kittycad/py.typed"]
 
 [tool.poetry.dependencies]
-python = "^3.6"
+python = "^3.7"
 httpx = ">=0.15.4,<0.21.0"
 attrs = ">=20.1.0,<22.0.0"
 python-dateutil = "^2.8.0"
@@ -31,6 +31,8 @@ pytest-cov = "^3.0.0"
 pytest-asyncio = "^0.16.0"
 openapi-parser = "^0.2.6"
 autopep8 = "^1.6.0"
+prance = "^0.21.8"
+openapi-spec-validator = "^0.4.0"
 
 [build-system]
 requires = ["poetry>=1.0"]