* Create publish-docker-images.yaml

* Add copilot_proxy publishing

* Add model_converter publishing

* Use dockerhub version

* Do not login for PRs

* Overwrite some of the label values

* Move ignore files to the root of `context`

* Add comments & fix some issues

* Fix typos

* Remove the target of the master branch

* Delete .dockerignore

* Delete .dockerignore

* Add Flake8

* Add Flake8 and format code accordingly

* Iterate on the PR template, fix the token for the contributor action

* Remove converter image build

* Update Dockerfile of proxy

* Comment out proxy image in compose

Co-authored-by: Fred de Gier <freddegier@me.com>

* Fix build action

---------

Co-authored-by: Rowe Wilson Frederisk Holme <frederisk@outlook.com>
Fred de Gier, 2023-02-15 09:17:07 +01:00, committed by GitHub
commit 283668448d
13 changed files with 198 additions and 67 deletions

View file

@@ -27,6 +27,7 @@ app = FastAPI(
     swagger_ui_parameters={"defaultModelsExpandDepth": -1}
 )
 
+
 @app.exception_handler(FauxPilotException)
 async def fauxpilot_handler(request: Request, exc: FauxPilotException):
     return JSONResponse(
@@ -34,7 +35,8 @@ async def fauxpilot_handler(request: Request, exc: FauxPilotException):
         content=exc.json()
     )
 
-# Used to support copilot.vim
+
+# Used to support copilot.vim
 @app.get("/copilot_internal/v2/token")
 def get_copilot_token():
     content = {'token': '1', 'expires_at': 2600000000, 'refresh_in': 900}
@@ -43,8 +45,9 @@ def get_copilot_token():
         content=content
     )
 
+
 @app.post("/v1/engines/codegen/completions")
-# Used to support copilot.vim
+# Used to support copilot.vim
 @app.post("/v1/engines/copilot-codex/completions")
 @app.post("/v1/completions")
 async def completions(data: OpenAIinput):
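The routes above are unchanged by this hunk beyond comment placement and blank lines. As a rough illustration only (the host and port are assumptions, not part of this commit; FauxPilot's proxy is commonly exposed on localhost:5000), the stubbed token endpoint can be exercised once the proxy is running:

import requests

# Hypothetical local deployment of the proxy; adjust the URL to your setup.
resp = requests.get("http://localhost:5000/copilot_internal/v2/token")
print(resp.json())  # expected: {'token': '1', 'expires_at': 2600000000, 'refresh_in': 900}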

View file

@@ -2,9 +2,11 @@ from typing import Optional, Union
 
 from pydantic import BaseModel, constr
 
+ModelType = constr(regex="^(fastertransformer|py-model)$")
+
 
 class OpenAIinput(BaseModel):
-    model: constr(regex="^(fastertransformer|py-model)$") = "fastertransformer"
+    model: ModelType = "fastertransformer"
     prompt: Optional[str]
     suffix: Optional[str]
     max_tokens: Optional[int] = 16
@@ -20,4 +22,3 @@ class OpenAIinput(BaseModel):
     best_of: Optional[int] = 1
     logit_bias: Optional[dict]
     user: Optional[str]
-
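The ModelType alias introduced above simply names the existing constr regex so it can be reused. A small standalone sketch of the validation it provides (this assumes pydantic v1, where constr takes a regex argument; it is not code from this commit):

from pydantic import BaseModel, ValidationError, constr

ModelType = constr(regex="^(fastertransformer|py-model)$")


class Example(BaseModel):
    # Same constraint as OpenAIinput.model: only the two backend names are accepted.
    model: ModelType = "fastertransformer"


print(Example().model)            # fastertransformer
try:
    Example(model="gpt-neox")     # hypothetical invalid value, rejected by the regex
except ValidationError as err:
    print(err.errors()[0]["loc"])  # ('model',)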

View file

@@ -1,10 +1,12 @@
-from typing import *
+from typing import Optional
 
+
 class FauxPilotException(Exception):
-    def __init__(self, message: str, type: Optional[str] = None, param: Optional[str] = None, code: Optional[int] = None):
+    def __init__(self, message: str, error_type: Optional[str] = None, param: Optional[str] = None,
+                 code: Optional[int] = None):
         super().__init__(message)
         self.message = message
-        self.type = type
+        self.error_type = error_type
         self.param = param
         self.code = code
 
@@ -12,8 +14,8 @@ class FauxPilotException(Exception):
         return {
             'error': {
                 'message': self.message,
-                'type': self.type,
+                'type': self.error_type,
                 'param': self.param,
                 'code': self.code
             }
         }
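The rename from type to error_type avoids shadowing the built-in type inside the constructor while keeping the serialized key "type" that API clients expect. A standalone sketch of the resulting behaviour (illustrative only; the example error_type and message values are made up, not taken from this commit):

from typing import Optional


class FauxPilotException(Exception):
    def __init__(self, message: str, error_type: Optional[str] = None, param: Optional[str] = None,
                 code: Optional[int] = None):
        super().__init__(message)
        self.message = message
        self.error_type = error_type
        self.param = param
        self.code = code

    def json(self):
        # The outward-facing key stays "type"; only the attribute name changed.
        return {
            'error': {
                'message': self.message,
                'type': self.error_type,
                'param': self.param,
                'code': self.code
            }
        }


exc = FauxPilotException("model not available", error_type="invalid_request_error", code=400)
print(exc.json())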