shuffle/backend/app_gen/LICENSE (new file, 21 lines)
@@ -0,0 +1,21 @@
MIT License

Copyright (c) 2020 Frikkylikeme

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

shuffle/backend/app_gen/openapi-parsers/misp.py (new file, 109 lines)
@@ -0,0 +1,109 @@
import json
import yaml

items = []

openapi = {
    "openapi": "3.0.2",
    "info": {
        "title": "MISP",
        "description": "MISP API generated from the misp book: https://github.com/MISP/misp-book/blob/master/automation/README.md",
        "version": "1.0.0",
        "contact": {
            "name": "@frikkylikeme",
            "url": "https://twitter.com/frikkylikeme",
            "email": "frikky@shuffler.io"
        }
    },
    "paths": {},
    "components": {
        "schemas": {},
        "securitySchemes": {
            "ApiKeyAuth": {
                "type": "apiKey",
                "in": "header",
                "name": "Authorization",
            }
        },
    }
}

with open("misp.txt", "r") as tmp:
    newitem = {}
    recorditem = False

    counter = 0
    itemsplit = tmp.read().split("\n")
    for item in itemsplit:
        counter += 1
        if item.startswith("### ") and "/" in item:
            try:
                path = item.split(" ")[2]
                method = item.split(" ")[1].lower()
                newitem = {
                    "path": path,
                    "method": method,
                }

                try:
                    openapi["paths"][path][method] = {}
                except KeyError:
                    openapi["paths"][path] = {}
                    openapi["paths"][path][method] = {}

            except IndexError:
                newitem = {}
                continue

            recorditem = True
            #print(newitem)

        if not recorditem:
            continue

        if "Description" in item:
            openapi["paths"][newitem["path"]][newitem["method"]]["description"] = itemsplit[counter+1]
        elif "URL Arguments" in item:
            parameters = []
            innercnt = 0

            openapi["paths"][newitem["path"]][newitem["method"]]["parameters"] = []
            while True:
                curline = itemsplit[counter+1+innercnt]
                if "#" in curline:
                    break

                innercnt += 1
                if not curline:
                    continue

                print(curline)
                parameters.append({
                    "description": curline.split(" ")[1],
                    "in": "query",
                    "name": curline.split(" ")[1],
                    "required": True,
                    "schema": {"type": "string"},
                })

            openapi["paths"][newitem["path"]][newitem["method"]]["parameters"] = parameters
        elif "Output" in item:
            # FIXME
            innercnt = 0
            while True:
                curline = itemsplit[counter+1+innercnt]

                if "#" in curline:
                    break

                innercnt += 1
                if "json" in curline:
                    continue
                #print(curline)

print(json.dumps(openapi, indent=4))


generatedfile = "generated/misp.yaml"
with open(generatedfile, "w+") as tmp:
    tmp.write(yaml.dump(openapi))
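
The parser above reads misp.txt, a text dump of the misp-book automation chapter; the exact heading layout is an assumption here, inferred only from how the script splits each line ("### METHOD /path"). A small sketch of that splitting on a hypothetical fragment:

    # Hypothetical misp.txt fragment in the shape the parser expects:
    # "### <METHOD> <path>" headings followed by "Description" / "URL Arguments" sections.
    sample = """### POST /attributes/restSearch
    Description

    Search for attributes.
    URL Arguments

    value The value to search for
    #"""

    for line in sample.split("\n"):
        if line.startswith("### ") and "/" in line:
            method = line.split(" ")[1].lower()  # -> "post"
            path = line.split(" ")[2]            # -> "/attributes/restSearch"
            print(method, path)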

shuffle/backend/app_gen/openapi-parsers/swimlane.py (new file, 311 lines)
@@ -0,0 +1,311 @@
import requests
import yaml
import json
import os
import io
import base64
from PIL import Image
#import tkinter
#import _tkinter
#tkinter._test()


#sudo apt-get install python-imaging-tk
#sudo apt-get install python3-tk

# USAGE:
# 1. Find the item here:
# https://apphub.swimlane.com/swimbundles/swimlane/sw_alienvault_threatcrowd
# 2.
# https://jsonlint.com/

# META: data["meta"]. Stuff like count. May be useful :)

def parse_data(data):
    openapi = {
        "openapi": "3.0.2",
        "info": {
            "title": "",
            "description": "",
            "version": "1.0.0",
            "contact": {
                "name": "@frikkylikeme",
                "url": "https://twitter.com/frikkylikeme",
                "email": "frikky@shuffler.io"
            }
        },
        "paths": {},
        "components": {
            "schemas": {},
            "securitySchemes": {},
        }
    }

    data = data["swimbundle"]
    filename = "%s.yaml" % data["product"].replace(" ", "_").lower()
    openapi["info"]["title"] = "%s %s" % (data["vendor"], data["product"])
    openapi["info"]["description"] = "Automated generation of %s" % (openapi["info"]["title"])
    # data["description"]

    # https://swagger.io/docs/specification/authentication/
    try:
        asset = data["asset"]
        inputparams = asset["inputParameters"]

        try:
            openapi["servers"] = [inputparams["api_url"]["example"]]
        except KeyError as e:
            #print(inputparams)
            #print("Field error: %s" % e)
            pass

        authset = False
        try:
            tmpauth = inputparams["api_user"]
            tmpauth = inputparams["api_key"]

            openapi["components"]["securitySchemes"] = {
                "BasicAuth": {
                    "type": "http",
                    "scheme": "basic"
                }
            }
            authset = True
        except KeyError as e:
            pass

        try:
            tmpauth = inputparams["username"]
            tmpauth = inputparams["password"]

            openapi["components"]["securitySchemes"] = {
                "BasicAuth": {
                    "type": "http",
                    "scheme": "basic"
                }
            }
            authset = True
        except KeyError as e:
            pass

        #if not authset:
        #    print("AUTH NOT SET: %s" % inputparams)

    except KeyError as e:
        print("KeyError asset: %s" % e)

    cnt = 0
    paramnames = []
    for task in data["tasks"]:
        method = "post"

        openapi["paths"]["tmp%d" % cnt] = {}
        openapi["paths"]["tmp%d" % cnt][method] = {
            "summary": task["name"],
            "description": task["description"],
            "parameters": [],
            "responses": {
                "200": {
                    "description": "Successful request",

                }
            },
        }

        taskcategory = task["family"]
        taskname = task["name"]
        paramnames.append(taskname)

        for key, value in task["inputParameters"].items():
            schema = "string"
            inVar = "query"

            if value["type"] == 6:
                inVar = "body"

            schema = "string"
            schemaset = False
            if value["type"] != 1:
                if (value["type"] == 7):
                    schema = "boolean"
                    schemaset = True

                if schema == "string" and schemaset:
                    print("Should change type: %d" % value["type"])
                    print(task["name"])
                    print(value["name"])

            example = ""
            try:
                example = value["example"]
            except KeyError:
                pass

            description = ""
            try:
                description = value["description"]
            except KeyError:
                pass

            required = False
            try:
                required = value["required"]
            except KeyError:
                pass

            openapi["paths"]["tmp%d" % cnt][method]["parameters"].append({
                "name": value["name"],
                "required": required,
                "example": example,
                "description": description,
                "schema": {"type": schema},
                "in": inVar
            })

        if len(task["availableOutputVariables"]) > 0:
            openapi["paths"]["tmp%d" % cnt][method]["responses"]["200"]["content"] = {
                "application/json": {
                    "schema": {
                        "$ref": "#/components/schemas/tmp%d" % cnt
                    }
                }
            }

            #responses:
            #  '200':
            #    content:
            #      application/json:
            #        schema:
            #          $ref: '#/components/schemas/tmp1'
            #    description: Successful request

            openapi["components"]["schemas"]["tmp%d" % cnt] = {
                "type": "object",
                "properties": {},
            }

            for key, value in task["availableOutputVariables"].items():
                if key == "response_code":
                    continue

                openapi["components"]["schemas"]["tmp%d" % cnt]["properties"][key] = {
                    "type": "string"
                }

        cnt += 1

    print("%s: %d" % (openapi["info"]["title"], len(paramnames)))

    return filename, openapi

def dump_data(filename, openapi, category):
    generatedfile = "generated/%s/%s" % (category, filename)
    try:
        with open(generatedfile, "w+") as tmp:
            tmp.write(yaml.dump(openapi))
    except FileNotFoundError:
        try:
            os.mkdir("generated/%s" % category)
            with open(generatedfile, "w+") as tmp:
                tmp.write(yaml.dump(openapi))

        except FileExistsError:
            pass

if __name__ == "__main__":
    #https://apphub.swimlane.com/
    categories = [
        "Investigation",
        "Endpoint Security & Management",
        "Network Security & Management",
        "Communication",
        "SIEM & Log Management",
        "Governance & Risk Management",
        "Vulnerability & Patch Management",
        "Ticket Management",
        "DevOps & Application Security",
        "Identity & Access Management",
        "Infrastructure",
        "Miscellaneous",
    ]

    search_category = categories[2]
    total = 0
    for search_category in categories:
        number = 1
        innertotal = 0

        while(True):
            url = "https://apphub.swimlane.io/api/search/swimbundles?page=%d" % number

            json = {"fields": {"family": search_category}}
            ret = requests.post(
                url,
                json=json,
            )

            if ret.status_code != 201:
                print("RET NOT 201: %d" % ret.status_code)
                break

            parsed = ret.json()
            try:
                category = parsed["data"][0]["swimbundleMeta"]["family"][0]
            except KeyError:
                category = ""
            except IndexError:
                category = ""

            if category == "":
                break

            for data in parsed["data"]:
                try:
                    filename, openapi = parse_data(data)
                except:
                    try:
                        print("Skipping %s %s because of an error" % (data["vendor"], data["product"]))
                    except KeyError:
                        pass

                    continue

                openapi["tags"] = [
                    {
                        "name": category,
                    }
                ]

                appid = data["swimbundleMeta"]["logo"]["id"]
                logoUrl = "https://apphub.swimlane.io/api/logos/%s" % appid
                logodata = requests.get(logoUrl)
                if logodata.status_code == 200:
                    logojson = logodata.json()
                    try:
                        logobase64 = logojson["data"]["base64"]
                        #.split(",")[1]

                        openapi["info"]["x-logo"] = logobase64
                        #print(logobase64)
                        #msg = base64.b64decode(logobase64)
                        #with io.BytesIO(msg) as buf:
                        #    with Image.open(buf) as tempImg:
                        #        newWidth = 174 / tempImg.width # change this to what ever width you need.
                        #        newHeight = 174 / tempImg.height # change this to what ever height you need.
                        #        newSize = (int(newWidth * tempImg.width), int(newHeight * tempImg.height))
                        #        newImg1 = tempImg.resize(newSize)
                        #        lbl1.IMG = ImageTk.PhotoImage(image=newImg1)
                        #        lbl1.configure(image=lbl1.IMG)
                    except KeyError:
                        print("Failed logo parsing for %s" % appid)
                        pass

                dump_data(filename, openapi, category)
                innertotal += 1
                total += 1

            number += 1

        print("Created %d openapi specs from Swimlane with category %s" % (innertotal, search_category))

    print("\nCreated %d TOTAL openapi specs from Swimlane" % (total))
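
parse_data expects the swimbundle JSON returned by the AppHub search API. The field names below are the ones the function actually reads; the values are made up for illustration:

    sample = {
        "swimbundle": {
            "vendor": "AlienVault",
            "product": "ThreatCrowd",
            "asset": {"inputParameters": {"api_url": {"example": "https://www.threatcrowd.org"}}},
            "tasks": [
                {
                    "name": "Domain Report",
                    "description": "Look up a domain",
                    "family": "Investigation",
                    "inputParameters": {
                        "domain": {"name": "domain", "type": 1, "required": True,
                                   "example": "example.com", "description": "Domain to look up"}
                    },
                    "availableOutputVariables": {"response_code": {}, "votes": {}},
                }
            ],
        }
    }

    # -> ("threatcrowd.yaml", OpenAPI dict with one "tmp0" path and a tmp0 response schema)
    filename, spec = parse_data(sample)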

shuffle/backend/app_gen/openapi/README.md (new file, 7 lines)
@@ -0,0 +1,7 @@
# OpenAPI generator
This contains test code that's been moved to shaffuru/backend/go-app/codegen.go

## Todo:
1. Don't use filesystem, but rather store in GCP
2. Add swagger 2.0 to 3.0 converter
3. Fix body / data parsing
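
For reference, the generator in this commit (test.go / testGCP.go below) rejects specs with an empty info.title or an empty servers list. A minimal accepted input, sketched here as a Python dict with illustrative values (field names mirror the embedded test spec in test.go):

    import json

    # Minimal OpenAPI 3 document the generator accepts: non-empty title,
    # at least one server, and one GET path with a required query parameter.
    minimal_spec = {
        "openapi": "3.0.2",
        "info": {"title": "Example", "description": "Example app", "version": "1.0"},
        "servers": [{"url": "https://shuffler.io/api/v1"}],
        "paths": {
            "/workflows": {
                "get": {
                    "summary": "Get workflows",
                    "description": "Get workflows",
                    "parameters": [
                        {"in": "query", "name": "hi", "required": True,
                         "schema": {"type": "string"}}
                    ],
                    "responses": {"default": {"description": "default"}},
                }
            }
        },
    }

    print(json.dumps(minimal_spec))  # feed this JSON to the generator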

shuffle/backend/app_gen/openapi/baseline/Dockerfile (new file, 29 lines)
@@ -0,0 +1,29 @@
# Base our app image off of the WALKOFF App SDK image
FROM frikky/shuffle:app_sdk as base

# We're going to stage away all of the bloat from the build tools so lets create a builder stage
#FROM base as builder

# Install all alpine build tools needed for our pip installs
#RUN apk --no-cache add --update alpine-sdk libffi libffi-dev musl-dev openssl-dev

# Install all of our pip packages in a single directory that we can copy to our base image later
#RUN mkdir /install
#WORKDIR /install
#COPY requirements.txt /requirements.txt
#
## Switch back to our base image and copy in all of our built packages and source code
#FROM base
#COPY --from=builder /install /usr/local


# Install any binary dependencies needed in our final image - this can be a lot of different stuff
#RUN apk --no-cache add --update libmagic
WORKDIR /
COPY requirements.txt /requirements.txt
RUN pip install --prefix="/usr/local" -r /requirements.txt
COPY src /app

# Finally, lets run our app!
WORKDIR /app
CMD python app.py --log-level DEBUG

@@ -0,0 +1,3 @@
# No extra requirements needed
requests
urllib3

shuffle/backend/app_gen/openapi/test.go (new file, 387 lines)
@@ -0,0 +1,387 @@
package main

import (
	"crypto/md5"
	"encoding/hex"
	"errors"
	"fmt"
	"github.com/getkin/kin-openapi/openapi3"
	"gopkg.in/yaml.v2"
	"io"
	"io/ioutil"
	"log"
	"os"
	"strings"
)

type WorkflowApp struct {
	Name        string `json:"name" yaml:"name" required:true datastore:"name"`
	IsValid     bool   `json:"is_valid" yaml:"is_valid" required:true datastore:"is_valid"`
	ID          string `json:"id" yaml:"id,omitempty" required:false datastore:"id"`
	Link        string `json:"link" yaml:"link" required:false datastore:"link,noindex"`
	AppVersion  string `json:"app_version" yaml:"app_version" required:true datastore:"app_version"`
	Description string `json:"description" datastore:"description" required:false yaml:"description"`
	Environment string `json:"environment" datastore:"environment" required:true yaml:"environment"`
	SmallImage  string `json:"small_image" datastore:"small_image,noindex" required:false yaml:"small_image"`
	LargeImage  string `json:"large_image" datastore:"large_image,noindex" yaml:"large_image" required:false`
	ContactInfo struct {
		Name string `json:"name" datastore:"name" yaml:"name"`
		Url  string `json:"url" datastore:"url" yaml:"url"`
	} `json:"contact_info" datastore:"contact_info" yaml:"contact_info" required:false`
	Actions        []WorkflowAppAction `json:"actions" yaml:"actions" required:true datastore:"actions"`
	Authentication Authentication      `json:"authentication" yaml:"authentication" required:false datastore:"authentication"`
}

type AuthenticationParams struct {
	Description string `json:"description" datastore:"description" yaml:"description"`
	ID          string `json:"id" datastore:"id" yaml:"id"`
	Name        string `json:"name" datastore:"name" yaml:"name"`
	Example     string `json:"example" datastore:"example" yaml:"example"`
	Value       string `json:"value,omitempty" datastore:"value" yaml:"value"`
	Multiline   bool   `json:"multiline" datastore:"multiline" yaml:"multiline"`
	Required    bool   `json:"required" datastore:"required" yaml:"required"`
}

type Authentication struct {
	Required   bool                   `json:"required" datastore:"required" yaml:"required"`
	Parameters []AuthenticationParams `json:"parameters" datastore:"parameters" yaml:"parameters"`
}

type AuthenticationStore struct {
	Key   string `json:"key" datastore:"key"`
	Value string `json:"value" datastore:"value"`
}

type WorkflowAppActionParameter struct {
	Description string           `json:"description" datastore:"description" yaml:"description"`
	ID          string           `json:"id" datastore:"id" yaml:"id,omitempty"`
	Name        string           `json:"name" datastore:"name" yaml:"name"`
	Example     string           `json:"example" datastore:"example" yaml:"example"`
	Value       string           `json:"value" datastore:"value" yaml:"value,omitempty"`
	Multiline   bool             `json:"multiline" datastore:"multiline" yaml:"multiline"`
	ActionField string           `json:"action_field" datastore:"action_field" yaml:"actionfield,omitempty"`
	Variant     string           `json:"variant" datastore:"variant" yaml:"variant,omitempty"`
	Required    bool             `json:"required" datastore:"required" yaml:"required"`
	Schema      SchemaDefinition `json:"schema" datastore:"schema" yaml:"schema"`
}

type SchemaDefinition struct {
	Type string `json:"type" datastore:"type"`
}

type WorkflowAppAction struct {
	Description    string                       `json:"description" datastore:"description"`
	ID             string                       `json:"id" datastore:"id" yaml:"id,omitempty"`
	Name           string                       `json:"name" datastore:"name"`
	NodeType       string                       `json:"node_type" datastore:"node_type"`
	Environment    string                       `json:"environment" datastore:"environment"`
	Authentication []AuthenticationStore        `json:"authentication" datastore:"authentication" yaml:"authentication,omitempty"`
	Parameters     []WorkflowAppActionParameter `json:"parameters" datastore:"parameters"`
	Returns        struct {
		Description string           `json:"description" datastore:"returns" yaml:"description,omitempty"`
		ID          string           `json:"id" datastore:"id" yaml:"id,omitempty"`
		Schema      SchemaDefinition `json:"schema" datastore:"schema" yaml:"schema"`
	} `json:"returns" datastore:"returns"`
}

func copyFile(fromfile, tofile string) error {
	from, err := os.Open(fromfile)
	if err != nil {
		return err
	}
	defer from.Close()

	to, err := os.OpenFile(tofile, os.O_RDWR|os.O_CREATE, 0666)
	if err != nil {
		return err
	}
	defer to.Close()

	_, err = io.Copy(to, from)
	if err != nil {
		return err
	}

	return nil
}

// Builds the base structure for the app that we're making
// Returns error if anything goes wrong. This has to work if
// the python code is supposed to be generated
func buildStructure(swagger *openapi3.Swagger, curHash string) (string, error) {
	//log.Printf("%#v", swagger)

	// adding md5 based on input data to not overwrite earlier data.
	generatedPath := "generated"
	identifier := fmt.Sprintf("%s-%s", swagger.Info.Title, curHash)
	appPath := fmt.Sprintf("%s/%s", generatedPath, identifier)

	os.MkdirAll(appPath, os.ModePerm)
	os.Mkdir(fmt.Sprintf("%s/src", appPath), os.ModePerm)

	err := copyFile("baseline/Dockerfile", fmt.Sprintf("%s/%s", appPath, "Dockerfile"))
	if err != nil {
		log.Println("Failed to move Dockerfile")
		return appPath, err
	}

	err = copyFile("baseline/requirements.txt", fmt.Sprintf("%s/%s", appPath, "requirements.txt"))
	if err != nil {
		log.Println("Failed to move requirements.txt")
		return appPath, err
	}

	return appPath, nil
}

func makePythoncode(name, url, method string, parameters, optionalQueries []string) string {
	method = strings.ToLower(method)
	queryString := ""
	queryData := ""

	// FIXME - this might break - need to check if ? or & should be set as query
	parameterData := ""
	if len(optionalQueries) > 0 {
		queryString += ", "
		for _, query := range optionalQueries {
			queryString += fmt.Sprintf("%s=\"\"", query)
			queryData += fmt.Sprintf(`
        if %s:
            url += f"&%s={%s}"`, query, query, query)
		}
	}

	if len(parameters) > 0 {
		parameterData = fmt.Sprintf(", %s", strings.Join(parameters, ", "))
	}

	// FIXME - add checks for query data etc
	data := fmt.Sprintf(`    async def %s_%s(self%s%s):
        url=f"%s"
        %s
        return requests.%s(url).text
`, name, method, parameterData, queryString, url, queryData, method)

	return data
}

func generateYaml(swagger *openapi3.Swagger) (WorkflowApp, []string, error) {
	api := WorkflowApp{}
	log.Printf("%#v", swagger.Info)

	if len(swagger.Info.Title) == 0 {
		return WorkflowApp{}, []string{}, errors.New("Swagger.Info.Title can't be empty.")
	}

	if len(swagger.Servers) == 0 {
		return WorkflowApp{}, []string{}, errors.New("Swagger.Servers can't be empty. Add 'servers':[{'url':'hostname.com'}]")
	}

	api.Name = swagger.Info.Title
	api.Description = swagger.Info.Description
	api.IsValid = true
	api.Link = swagger.Servers[0].URL // host does not exist lol
	api.AppVersion = "1.0.0"
	api.Environment = "cloud"
	api.ID = ""
	api.SmallImage = ""
	api.LargeImage = ""

	// This is the python code to be generated
	// Could just as well be go at this point lol
	pythonFunctions := []string{}

	for actualPath, path := range swagger.Paths {
		//log.Printf("%#v", path)
		//log.Printf("%#v", actualPath)
		// Find the path name and add it to makeCode() param

		firstQuery := true
		if path.Get != nil {
			// What to do with this, hmm
			functionName := strings.ReplaceAll(path.Get.Summary, " ", "_")
			functionName = strings.ToLower(functionName)

			action := WorkflowAppAction{
				Description: path.Get.Description,
				Name:        path.Get.Summary,
				NodeType:    "action",
				Environment: api.Environment,
				Parameters:  []WorkflowAppActionParameter{},
			}

			action.Returns.Schema.Type = "string"
			baseUrl := fmt.Sprintf("%s%s", api.Link, actualPath)

			//log.Println(path.Parameters)

			// Parameters: []WorkflowAppActionParameter{},
			// FIXME - add data for POST stuff
			firstQuery = true
			optionalQueries := []string{}
			parameters := []string{}
			optionalParameters := []WorkflowAppActionParameter{}
			if len(path.Get.Parameters) > 0 {
				for _, param := range path.Get.Parameters {
					curParam := WorkflowAppActionParameter{
						Name:        param.Value.Name,
						Description: param.Value.Description,
						Multiline:   false,
						Required:    param.Value.Required,
						Schema: SchemaDefinition{
							Type: param.Value.Schema.Value.Type,
						},
					}

					if param.Value.Required {
						action.Parameters = append(action.Parameters, curParam)
					} else {
						optionalParameters = append(optionalParameters, curParam)
					}

					if param.Value.In == "path" {
						log.Printf("PATH!: %s", param.Value.Name)
						parameters = append(parameters, param.Value.Name)
						//baseUrl = fmt.Sprintf("%s%s", baseUrl)
					} else if param.Value.In == "query" {
						log.Printf("QUERY!: %s", param.Value.Name)
						if !param.Value.Required {
							optionalQueries = append(optionalQueries, param.Value.Name)
							continue
						}

						parameters = append(parameters, param.Value.Name)

						if firstQuery {
							baseUrl = fmt.Sprintf("%s?%s={%s}", baseUrl, param.Value.Name, param.Value.Name)
							firstQuery = false
						} else {
							baseUrl = fmt.Sprintf("%s&%s={%s}", baseUrl, param.Value.Name, param.Value.Name)
							firstQuery = false
						}
					}

				}
			}

			// ensuring that they end up last in the specification
			// (order is ish important for optional params) - they need to be last.
			for _, optionalParam := range optionalParameters {
				action.Parameters = append(action.Parameters, optionalParam)
			}

			curCode := makePythoncode(functionName, baseUrl, "get", parameters, optionalQueries)
			pythonFunctions = append(pythonFunctions, curCode)

			api.Actions = append(api.Actions, action)
		}
	}

	return api, pythonFunctions, nil
}

func verifyApi(api WorkflowApp) WorkflowApp {
	if api.AppVersion == "" {
		api.AppVersion = "1.0.0"
	}

	return api
}

func dumpPython(basePath, name, version string, pythonFunctions []string) error {
	//log.Printf("%#v", api)
	log.Printf(strings.Join(pythonFunctions, "\n"))

	parsedCode := fmt.Sprintf(`import requests
import asyncio
import json
import urllib3

from walkoff_app_sdk.app_base import AppBase

class %s(AppBase):
    """
    Autogenerated class by Shuffler
    """

    __version__ = "%s"
    app_name = "%s"

    def __init__(self, redis, logger, console_logger=None):
        self.verify = False
        urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
        super().__init__(redis, logger, console_logger)

%s

if __name__ == "__main__":
    asyncio.run(CarbonBlack.run(), debug=True)
`, name, version, name, strings.Join(pythonFunctions, "\n"))

	err := ioutil.WriteFile(fmt.Sprintf("%s/src/app.py", basePath), []byte(parsedCode), os.ModePerm)
	if err != nil {
		return err
	}
	fmt.Println(parsedCode)

	//log.Println(string(data))
	return nil
}

func dumpApi(basePath string, api WorkflowApp) error {
	//log.Printf("%#v", api)
	data, err := yaml.Marshal(api)
	if err != nil {
		log.Printf("Error with yaml marshal: %s", err)
		return err
	}

	err = ioutil.WriteFile(fmt.Sprintf("%s/api.yaml", basePath), []byte(data), os.ModePerm)
	if err != nil {
		return err
	}

	//log.Println(string(data))
	return nil
}

func main() {
	data := []byte(`{"swagger":"3.0","info":{"title":"hi","description":"you","version":"1.0"},"servers":[{"url":"https://shuffler.io/api/v1"}],"host":"shuffler.io","basePath":"/api/v1","schemes":["https:"],"paths":{"/workflows":{"get":{"responses":{"default":{"description":"default","schema":{}}},"summary":"Get workflows","description":"Get workflows","parameters":[]}},"/workflows/{id}":{"get":{"responses":{"default":{"description":"default","schema":{}}},"summary":"Get workflow","description":"Get workflow","parameters":[{"in":"query","name":"forgetme","description":"Generated by shuffler.io OpenAPI","required":true,"schema":{"type":"string"}},{"in":"query","name":"anotherone","description":"Generated by shuffler.io OpenAPI","required":false,"schema":{"type":"string"}},{"in":"query","name":"hi","description":"Generated by shuffler.io OpenAPI","required":true,"schema":{"type":"string"}},{"in":"path","name":"id","description":"Generated by shuffler.io OpenAPI","required":true,"schema":{"type":"string"}}]}}},"securityDefinitions":{}}`)

	hasher := md5.New()
	hasher.Write(data)
	newmd5 := hex.EncodeToString(hasher.Sum(nil))

	swagger, err := openapi3.NewSwaggerLoader().LoadSwaggerFromData(data)
	if err != nil {
		log.Printf("Swagger validation error: %s", err)
		os.Exit(3)
	}

	if strings.Contains(swagger.Info.Title, " ") {
		swagger.Info.Title = strings.ReplaceAll(swagger.Info.Title, " ", "")
	}

	basePath, err := buildStructure(swagger, newmd5)
	if err != nil {
		log.Printf("Failed to build base structure: %s", err)
		os.Exit(3)
	}

	api, pythonfunctions, err := generateYaml(swagger)
	if err != nil {
		log.Printf("Failed building and generating yaml: %s", err)
		os.Exit(3)
	}

	err = dumpApi(basePath, api)
	if err != nil {
		log.Printf("Failed dumping yaml: %s", err)
		os.Exit(3)
	}

	err = dumpPython(basePath, swagger.Info.Title, swagger.Info.Version, pythonfunctions)
	if err != nil {
		log.Printf("Failed dumping python: %s", err)
		os.Exit(3)
	}
}
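
To make the makePythoncode template concrete, this is approximately the function it emits for the /workflows/{id} GET path in the embedded test spec above (required query parameters forgetme and hi, optional anotherone, path parameter id); whitespace is approximate:

    async def get_workflow_get(self, forgetme, hi, id, anotherone=""):
        url=f"https://shuffler.io/api/v1/workflows/{id}?forgetme={forgetme}&hi={hi}"

        if anotherone:
            url += f"&anotherone={anotherone}"
        return requests.get(url).text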

shuffle/backend/app_gen/openapi/testGCP.go (new file, 499 lines)
@@ -0,0 +1,499 @@
package main

/*
	Code used to generate apps from OpenAPI JSON data

	Any function ending with GCP doesn't use local fileIO, but rather
	google cloud storage, and also has a normal filesystem version of the
	same code (doesn't require client as first argument).

	This code is used in the backend to generate apps on the fly for users.
	All new code is appended to backend/go-app/codegen.go
*/

import (
	"context"
	"crypto/md5"
	"encoding/hex"
	"errors"
	"fmt"
	"io"
	"io/ioutil"
	"log"
	"os"
	"strings"

	"cloud.google.com/go/storage"
	"github.com/getkin/kin-openapi/openapi3"
	"gopkg.in/yaml.v2"
)

var bucketName = "shuffler.appspot.com"

type WorkflowApp struct {
	Name        string `json:"name" yaml:"name" required:true datastore:"name"`
	IsValid     bool   `json:"is_valid" yaml:"is_valid" required:true datastore:"is_valid"`
	ID          string `json:"id" yaml:"id,omitempty" required:false datastore:"id"`
	Link        string `json:"link" yaml:"link" required:false datastore:"link,noindex"`
	AppVersion  string `json:"app_version" yaml:"app_version" required:true datastore:"app_version"`
	Description string `json:"description" datastore:"description" required:false yaml:"description"`
	Environment string `json:"environment" datastore:"environment" required:true yaml:"environment"`
	SmallImage  string `json:"small_image" datastore:"small_image,noindex" required:false yaml:"small_image"`
	LargeImage  string `json:"large_image" datastore:"large_image,noindex" yaml:"large_image" required:false`
	ContactInfo struct {
		Name string `json:"name" datastore:"name" yaml:"name"`
		Url  string `json:"url" datastore:"url" yaml:"url"`
	} `json:"contact_info" datastore:"contact_info" yaml:"contact_info" required:false`
	Actions        []WorkflowAppAction `json:"actions" yaml:"actions" required:true datastore:"actions"`
	Authentication Authentication      `json:"authentication" yaml:"authentication" required:false datastore:"authentication"`
}

type AuthenticationParams struct {
	Description string `json:"description" datastore:"description" yaml:"description"`
	ID          string `json:"id" datastore:"id" yaml:"id"`
	Name        string `json:"name" datastore:"name" yaml:"name"`
	Example     string `json:"example" datastore:"example" yaml:"example"`
	Value       string `json:"value,omitempty" datastore:"value" yaml:"value"`
	Multiline   bool   `json:"multiline" datastore:"multiline" yaml:"multiline"`
	Required    bool   `json:"required" datastore:"required" yaml:"required"`
}

type Authentication struct {
	Required   bool                   `json:"required" datastore:"required" yaml:"required"`
	Parameters []AuthenticationParams `json:"parameters" datastore:"parameters" yaml:"parameters"`
}

type AuthenticationStore struct {
	Key   string `json:"key" datastore:"key"`
	Value string `json:"value" datastore:"value"`
}

type WorkflowAppActionParameter struct {
	Description string           `json:"description" datastore:"description" yaml:"description"`
	ID          string           `json:"id" datastore:"id" yaml:"id,omitempty"`
	Name        string           `json:"name" datastore:"name" yaml:"name"`
	Example     string           `json:"example" datastore:"example" yaml:"example"`
	Value       string           `json:"value" datastore:"value" yaml:"value,omitempty"`
	Multiline   bool             `json:"multiline" datastore:"multiline" yaml:"multiline"`
	ActionField string           `json:"action_field" datastore:"action_field" yaml:"actionfield,omitempty"`
	Variant     string           `json:"variant" datastore:"variant" yaml:"variant,omitempty"`
	Required    bool             `json:"required" datastore:"required" yaml:"required"`
	Schema      SchemaDefinition `json:"schema" datastore:"schema" yaml:"schema"`
}

type SchemaDefinition struct {
	Type string `json:"type" datastore:"type"`
}

type WorkflowAppAction struct {
	Description    string                       `json:"description" datastore:"description"`
	ID             string                       `json:"id" datastore:"id" yaml:"id,omitempty"`
	Name           string                       `json:"name" datastore:"name"`
	NodeType       string                       `json:"node_type" datastore:"node_type"`
	Environment    string                       `json:"environment" datastore:"environment"`
	Authentication []AuthenticationStore        `json:"authentication" datastore:"authentication" yaml:"authentication,omitempty"`
	Parameters     []WorkflowAppActionParameter `json:"parameters" datastore:"parameters"`
	Returns        struct {
		Description string           `json:"description" datastore:"returns" yaml:"description,omitempty"`
		ID          string           `json:"id" datastore:"id" yaml:"id,omitempty"`
		Schema      SchemaDefinition `json:"schema" datastore:"schema" yaml:"schema"`
	} `json:"returns" datastore:"returns"`
}

func copyFile(fromfile, tofile string) error {
	from, err := os.Open(fromfile)
	if err != nil {
		return err
	}
	defer from.Close()

	to, err := os.OpenFile(tofile, os.O_RDWR|os.O_CREATE, 0666)
	if err != nil {
		return err
	}
	defer to.Close()

	_, err = io.Copy(to, from)
	if err != nil {
		return err
	}

	return nil
}

func buildStructureGCP(client *storage.Client, swagger *openapi3.Swagger, curHash string) (string, error) {
	ctx := context.Background()

	// 1. Have baseline in bucket/generated_apps/baseline
	// 2. Copy the baseline to a new folder with identifier name

	basePath := "generated_apps"
	identifier := fmt.Sprintf("%s-%s", swagger.Info.Title, curHash)
	appPath := fmt.Sprintf("%s/%s", basePath, identifier)
	fileNames := []string{"Dockerfile", "requirements.txt"}
	for _, file := range fileNames {
		src := client.Bucket(bucketName).Object(fmt.Sprintf("%s/baseline/%s", basePath, file))
		dst := client.Bucket(bucketName).Object(fmt.Sprintf("%s/%s", appPath, file))
		if _, err := dst.CopierFrom(src).Run(ctx); err != nil {
			return "", err
		}
	}

	return appPath, nil
}

// Builds the base structure for the app that we're making
// Returns error if anything goes wrong. This has to work if
// the python code is supposed to be generated
func buildStructure(swagger *openapi3.Swagger, curHash string) (string, error) {
	//log.Printf("%#v", swagger)

	// adding md5 based on input data to not overwrite earlier data.
	generatedPath := "generated"
	identifier := fmt.Sprintf("%s-%s", swagger.Info.Title, curHash)
	appPath := fmt.Sprintf("%s/%s", generatedPath, identifier)

	os.MkdirAll(appPath, os.ModePerm)
	os.Mkdir(fmt.Sprintf("%s/src", appPath), os.ModePerm)

	err := copyFile("baseline/Dockerfile", fmt.Sprintf("%s/%s", appPath, "Dockerfile"))
	if err != nil {
		log.Println("Failed to move Dockerfile")
		return appPath, err
	}

	err = copyFile("baseline/requirements.txt", fmt.Sprintf("%s/%s", appPath, "requirements.txt"))
	if err != nil {
		log.Println("Failed to move requirements.txt")
		return appPath, err
	}

	return appPath, nil
}

func makePythoncode(name, url, method string, parameters, optionalQueries []string) string {
	method = strings.ToLower(method)
	queryString := ""
	queryData := ""

	// FIXME - this might break - need to check if ? or & should be set as query
	parameterData := ""
	if len(optionalQueries) > 0 {
		queryString += ", "
		for _, query := range optionalQueries {
			queryString += fmt.Sprintf("%s=\"\"", query)
			queryData += fmt.Sprintf(`
        if %s:
            url += f"&%s={%s}"`, query, query, query)
		}
	}

	if len(parameters) > 0 {
		parameterData = fmt.Sprintf(", %s", strings.Join(parameters, ", "))
	}

	// FIXME - add checks for query data etc
	data := fmt.Sprintf(`    async def %s_%s(self%s%s):
        url=f"%s"
        %s
        return requests.%s(url).text
`, name, method, parameterData, queryString, url, queryData, method)

	return data
}

func generateYaml(swagger *openapi3.Swagger) (WorkflowApp, []string, error) {
	api := WorkflowApp{}
	log.Printf("%#v", swagger.Info)

	if len(swagger.Info.Title) == 0 {
		return WorkflowApp{}, []string{}, errors.New("Swagger.Info.Title can't be empty.")
	}

	if len(swagger.Servers) == 0 {
		return WorkflowApp{}, []string{}, errors.New("Swagger.Servers can't be empty. Add 'servers':[{'url':'hostname.com'}]")
	}

	api.Name = swagger.Info.Title
	api.Description = swagger.Info.Description
	api.IsValid = true
	api.Link = swagger.Servers[0].URL // host does not exist lol
	api.AppVersion = "1.0.0"
	api.Environment = "cloud"
	api.ID = ""
	api.SmallImage = ""
	api.LargeImage = ""

	// This is the python code to be generated
	// Could just as well be go at this point lol
	pythonFunctions := []string{}

	for actualPath, path := range swagger.Paths {
		//log.Printf("%#v", path)
		//log.Printf("%#v", actualPath)
		// Find the path name and add it to makeCode() param

		firstQuery := true
		if path.Get != nil {
			// What to do with this, hmm
			functionName := strings.ReplaceAll(path.Get.Summary, " ", "_")
			functionName = strings.ToLower(functionName)

			action := WorkflowAppAction{
				Description: path.Get.Description,
				Name:        path.Get.Summary,
				NodeType:    "action",
				Environment: api.Environment,
				Parameters:  []WorkflowAppActionParameter{},
			}

			action.Returns.Schema.Type = "string"
			baseUrl := fmt.Sprintf("%s%s", api.Link, actualPath)

			//log.Println(path.Parameters)

			// Parameters: []WorkflowAppActionParameter{},
			// FIXME - add data for POST stuff
			firstQuery = true
			optionalQueries := []string{}
			parameters := []string{}
			optionalParameters := []WorkflowAppActionParameter{}
			if len(path.Get.Parameters) > 0 {
				for _, param := range path.Get.Parameters {
					curParam := WorkflowAppActionParameter{
						Name:        param.Value.Name,
						Description: param.Value.Description,
						Multiline:   false,
						Required:    param.Value.Required,
						Schema: SchemaDefinition{
							Type: param.Value.Schema.Value.Type,
						},
					}

					if param.Value.Required {
						action.Parameters = append(action.Parameters, curParam)
					} else {
						optionalParameters = append(optionalParameters, curParam)
					}

					if param.Value.In == "path" {
						log.Printf("PATH!: %s", param.Value.Name)
						parameters = append(parameters, param.Value.Name)
						//baseUrl = fmt.Sprintf("%s%s", baseUrl)
					} else if param.Value.In == "query" {
						log.Printf("QUERY!: %s", param.Value.Name)
						if !param.Value.Required {
							optionalQueries = append(optionalQueries, param.Value.Name)
							continue
						}

						parameters = append(parameters, param.Value.Name)

						if firstQuery {
							baseUrl = fmt.Sprintf("%s?%s={%s}", baseUrl, param.Value.Name, param.Value.Name)
							firstQuery = false
						} else {
							baseUrl = fmt.Sprintf("%s&%s={%s}", baseUrl, param.Value.Name, param.Value.Name)
							firstQuery = false
						}
					}

				}
			}

			// ensuring that they end up last in the specification
			// (order is ish important for optional params) - they need to be last.
			for _, optionalParam := range optionalParameters {
				action.Parameters = append(action.Parameters, optionalParam)
			}

			curCode := makePythoncode(functionName, baseUrl, "get", parameters, optionalQueries)
			pythonFunctions = append(pythonFunctions, curCode)

			api.Actions = append(api.Actions, action)
		}
	}

	return api, pythonFunctions, nil
}

func verifyApi(api WorkflowApp) WorkflowApp {
	if api.AppVersion == "" {
		api.AppVersion = "1.0.0"
	}

	return api
}

func dumpPythonGCP(client *storage.Client, basePath, name, version string, pythonFunctions []string) error {
	//log.Printf("%#v", api)
	log.Printf(strings.Join(pythonFunctions, "\n"))

	parsedCode := fmt.Sprintf(`import requests
import asyncio
import json
import urllib3

from walkoff_app_sdk.app_base import AppBase

class %s(AppBase):
    """
    Autogenerated class by Shuffler
    """

    __version__ = "%s"
    app_name = "%s"

    def __init__(self, redis, logger, console_logger=None):
        self.verify = False
        urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
        super().__init__(redis, logger, console_logger)

%s

if __name__ == "__main__":
    asyncio.run(CarbonBlack.run(), debug=True)
`, name, version, name, strings.Join(pythonFunctions, "\n"))

	// Create bucket handle
	ctx := context.Background()
	bucket := client.Bucket(bucketName)
	obj := bucket.Object(fmt.Sprintf("%s/src/app.py", basePath))
	w := obj.NewWriter(ctx)
	if _, err := fmt.Fprintf(w, parsedCode); err != nil {
		return err
	}
	// Close, just like writing a file.
	if err := w.Close(); err != nil {
		return err
	}

	return nil
}

func dumpPython(basePath, name, version string, pythonFunctions []string) error {
	//log.Printf("%#v", api)
	log.Printf(strings.Join(pythonFunctions, "\n"))

	parsedCode := fmt.Sprintf(`import requests
import asyncio
import json
import urllib3

from walkoff_app_sdk.app_base import AppBase

class %s(AppBase):
    """
    Autogenerated class by Shuffler
    """

    __version__ = "%s"
    app_name = "%s"

    def __init__(self, redis, logger, console_logger=None):
        self.verify = False
        urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
        super().__init__(redis, logger, console_logger)

%s

if __name__ == "__main__":
    asyncio.run(CarbonBlack.run(), debug=True)
`, name, version, name, strings.Join(pythonFunctions, "\n"))

	err := ioutil.WriteFile(fmt.Sprintf("%s/src/app.py", basePath), []byte(parsedCode), os.ModePerm)
	if err != nil {
		return err
	}
	fmt.Println(parsedCode)

	//log.Println(string(data))
	return nil
}

func dumpApiGCP(client *storage.Client, basePath string, api WorkflowApp) error {
	//log.Printf("%#v", api)
	data, err := yaml.Marshal(api)
	if err != nil {
		log.Printf("Error with yaml marshal: %s", err)
		return err
	}

	// Create bucket handle
	ctx := context.Background()
	bucket := client.Bucket(bucketName)
	obj := bucket.Object(fmt.Sprintf("%s/app.yaml", basePath))
	w := obj.NewWriter(ctx)
	if _, err := fmt.Fprintf(w, string(data)); err != nil {
		return err
	}
	// Close, just like writing a file.
	if err := w.Close(); err != nil {
		return err
	}

	//log.Println(string(data))
	return nil
}

func dumpApi(basePath string, api WorkflowApp) error {
	//log.Printf("%#v", api)
	data, err := yaml.Marshal(api)
	if err != nil {
		log.Printf("Error with yaml marshal: %s", err)
		return err
	}

	err = ioutil.WriteFile(fmt.Sprintf("%s/api.yaml", basePath), []byte(data), os.ModePerm)
	if err != nil {
		return err
	}

	//log.Println(string(data))
	return nil
}

func main() {
	data := []byte(`{"swagger":"3.0","info":{"title":"hi","description":"you","version":"1.0"},"servers":[{"url":"https://shuffler.io/api/v1"}],"host":"shuffler.io","basePath":"/api/v1","schemes":["https:"],"paths":{"/workflows":{"get":{"responses":{"default":{"description":"default","schema":{}}},"summary":"Get workflows","description":"Get workflows","parameters":[]}},"/workflows/{id}":{"get":{"responses":{"default":{"description":"default","schema":{}}},"summary":"Get workflow","description":"Get workflow","parameters":[{"in":"query","name":"forgetme","description":"Generated by shuffler.io OpenAPI","required":true,"schema":{"type":"string"}},{"in":"query","name":"anotherone","description":"Generated by shuffler.io OpenAPI","required":false,"schema":{"type":"string"}},{"in":"query","name":"hi","description":"Generated by shuffler.io OpenAPI","required":true,"schema":{"type":"string"}},{"in":"path","name":"id","description":"Generated by shuffler.io OpenAPI","required":true,"schema":{"type":"string"}}]}}},"securityDefinitions":{}}`)

	ctx := context.Background()
	client, err := storage.NewClient(ctx)
	if err != nil {
		log.Printf("Failed to create client: %v", err)
		os.Exit(3)
	}

	hasher := md5.New()
	hasher.Write(data)
	newmd5 := hex.EncodeToString(hasher.Sum(nil))
	swagger, err := openapi3.NewSwaggerLoader().LoadSwaggerFromData(data)
	if err != nil {
		log.Printf("Swagger validation error: %s", err)
		os.Exit(3)
	}

	if strings.Contains(swagger.Info.Title, " ") {
		swagger.Info.Title = strings.ReplaceAll(swagger.Info.Title, " ", "")
	}

	basePath, err := buildStructureGCP(client, swagger, newmd5)
	if err != nil {
		log.Printf("Failed to build base structure: %s", err)
		os.Exit(3)
	}

	api, pythonfunctions, err := generateYaml(swagger)
	if err != nil {
		log.Printf("Failed building and generating yaml: %s", err)
		os.Exit(3)
	}

	err = dumpApiGCP(client, basePath, api)
	if err != nil {
		log.Printf("Failed dumping yaml: %s", err)
		os.Exit(3)
	}

	err = dumpPythonGCP(client, basePath, swagger.Info.Title, swagger.Info.Version, pythonfunctions)
	if err != nil {
		log.Printf("Failed dumping python: %s", err)
		os.Exit(3)
	}
}
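
The GCP variant writes everything under generated_apps/<Title>-<md5>/ in the shuffler.appspot.com bucket (Dockerfile and requirements.txt copied from generated_apps/baseline/, plus app.yaml and src/app.py). A rough sketch for listing what a run produced, assuming the google-cloud-storage Python package and working credentials (neither is part of this commit):

    from google.cloud import storage  # assumed dependency, for illustration only

    client = storage.Client()
    # Object names follow buildStructureGCP / dumpApiGCP / dumpPythonGCP above.
    for blob in client.list_blobs("shuffler.appspot.com", prefix="generated_apps/"):
        print(blob.name)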

shuffle/backend/app_gen/python-lib/README.md (new file, 5 lines)
@@ -0,0 +1,5 @@
# Generators
This folder contains an attempt at creating apps & similar from python libraries

## Howto
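
The Howto section is still empty. As a starting point, the module discovery that generator.py (below) performs with jedi boils down to roughly this, using the same entrypoint library and the older jedi Script/completions API the script relies on:

    import jedi

    library = "thehive4py"  # same default entrypoint as generator.py
    source = "\nimport %s\n%s.\n" % (library, library)
    lines = source.split("\n")

    # Ask jedi what completes after "thehive4py." to discover the library's modules
    script = jedi.Script(source, line=len(lines) - 1, column=len(lines[-2]))
    modules = [c.name for c in script.completions() if c.type == "module"]
    print(modules)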

shuffle/backend/app_gen/python-lib/baseline/Dockerfile (new file, 26 lines)
@@ -0,0 +1,26 @@
# Base our app image off of the WALKOFF App SDK image
FROM frikky/shuffle:app_sdk as base

# We're going to stage away all of the bloat from the build tools so lets create a builder stage
FROM base as builder

# Install all alpine build tools needed for our pip installs
RUN apk --no-cache add --update alpine-sdk libffi libffi-dev musl-dev openssl-dev

# Install all of our pip packages in a single directory that we can copy to our base image later
RUN mkdir /install
WORKDIR /install
COPY requirements.txt /requirements.txt
RUN pip install --prefix="/install" -r /requirements.txt

# Switch back to our base image and copy in all of our built packages and source code
FROM base
COPY --from=builder /install /usr/local
COPY src /app

# Install any binary dependencies needed in our final image - this can be a lot of different stuff
RUN apk --no-cache add --update libmagic

# Finally, lets run our app!
WORKDIR /app
CMD python app.py --log-level DEBUG

@@ -0,0 +1,14 @@
version: '3.4'
services:
  thehive4py:
    build:
      context: .
      dockerfile: Dockerfile
    env_file:
      - env.txt
    restart: "no"
    deploy:
      mode: replicated
      replicas: 10
      restart_policy:
        condition: none

shuffle/backend/app_gen/python-lib/baseline/env.txt (new file, 4 lines)
@@ -0,0 +1,4 @@
REDIS_URI=redis://redis
REDIS_ACTION_RESULT_CH=action-results
REDIS_ACTION_RESULTS_GROUP=action-results-group
APP_NAME=

@@ -0,0 +1 @@
# No extra requirements needed
525
shuffle/backend/app_gen/python-lib/generator.py
Normal file
525
shuffle/backend/app_gen/python-lib/generator.py
Normal file
@ -0,0 +1,525 @@
|
||||
# Read a directory
|
||||
# Find python functions
|
||||
# Generate yaml
|
||||
|
||||
# FIXME:
|
||||
# Position, default_value and function in params
|
||||
|
||||
|
||||
|
||||
# TO ADD:
|
||||
# from walkoff_app_sdk.app_base import AppBase
|
||||
# class TheHive(AppBase): <-- Add appbase
|
||||
# __version__ = version within class
|
||||
# app_name = app_name in class
|
||||
# if __name__ == "__main__":
|
||||
# asyncio.run(TheHive.run(), debug=True) <-- APPEND SHIT HERE
|
||||
# async infront of every function?
|
||||
# Add async library to imports
|
||||
|
||||
# Make wrapper class? <-- within app.py
|
||||
|
||||
|
||||
# 1. Generate app.yaml (functions with returns etc)
|
||||
# 2. Generate app.py (with imports to the original function etc
|
||||
# 3. Build requirements.txt based on the items necessary
|
||||
# 4. Check whether it runs?
|
||||
|
||||
import os
|
||||
import yaml
|
||||
import jedi
|
||||
import shutil
|
||||
|
||||
# Testing generator
|
||||
entrypoint_directory = "thehive4py"
|
||||
include_requirements = False
|
||||
if not os.path.exists(entrypoint_directory):
|
||||
include_requirements = True
|
||||
print("Requires library in requirements")
|
||||
|
||||
|
||||
source = '''
|
||||
import %s
|
||||
%s.
|
||||
''' % (entrypoint_directory, entrypoint_directory)
|
||||
splitsource = source.split("\n")
|
||||
|
||||
# Find modules AKA files
|
||||
def get_modules():
|
||||
curline = splitsource[-2]
|
||||
print(splitsource, curline)
|
||||
entrypoint = jedi.Script(source, line=len(splitsource)-1, column=len(curline))
|
||||
|
||||
modules = []
|
||||
completions = entrypoint.completions()
|
||||
for item in completions:
|
||||
if item.type != "module":
|
||||
continue
|
||||
|
||||
|
||||
modules.append(item.name)
|
||||
|
||||
return modules
|
||||
|
||||
def loop_modules(modules, data):
|
||||
# Loop modules AKA files - this is garbage but works lmao
|
||||
for module in modules:
|
||||
modulesplit = list(splitsource)
|
||||
modulesplit[2] = "%s%s." % (modulesplit[2], module)
|
||||
|
||||
#print(modulesplit)
|
||||
source = "\n".join(modulesplit)
|
||||
entrypoint = jedi.Script(source, line=len(modulesplit)-1, column=len(modulesplit[2]))
|
||||
|
||||
# Loop classes in the files
|
||||
for classcompletion in entrypoint.completions():
|
||||
if classcompletion.type != "class":
|
||||
continue
|
||||
|
||||
if not classcompletion.full_name.startswith(modulesplit[2]):
|
||||
continue
|
||||
|
||||
# Same thing again, but for functions within classes
|
||||
# CBA with subclasses etc atm
|
||||
|
||||
#print(classcompletion.full_name, modulesplit[2])
|
||||
|
||||
classplit = list(modulesplit)
|
||||
classplit[2] = "%s." % (classcompletion.full_name)
|
||||
|
||||
#print(modulesplit)
|
||||
source = "\n".join(classplit)
|
||||
entrypoint = jedi.Script(source, line=len(classplit)-1, column=len(classplit[2]))
|
||||
|
||||
# List of functions sorted by their name
|
||||
nameinternalfunctions = []
|
||||
for functioncompletion in entrypoint.completions():
|
||||
if functioncompletion.type != "function":
|
||||
continue
|
||||
|
||||
if not functioncompletion.full_name.startswith(classplit[2]):
|
||||
continue
|
||||
|
||||
nameinternalfunctions.append(functioncompletion)
|
||||
|
||||
#print(nameinternalfunctions)
|
||||
|
||||
# List of functions sorted by their line in the file (reversed)
|
||||
# CODE USED TO ACTUALLY PRINT THE CODE
|
||||
|
||||
#prevnumber = 0
|
||||
#numberinternalfunctions = sorted(nameinternalfunctions, key=lambda k: k.line, reverse=True)
|
||||
numberinternalfunctions = sorted(nameinternalfunctions, key=lambda k: k.line)
|
||||
prevnumber = 0
|
||||
|
||||
origparent = "TheHiveApi"
|
||||
# Predefined functions? - maybe skip: __init__
|
||||
skip_functions = ["__init__"]
|
||||
skip_parameters = [""]
|
||||
cnt = 0
|
||||
            for item in numberinternalfunctions:
                if item.parent().name != origparent:
                    continue

                # FIXME - probably wrong
                if item.name in skip_functions or (item.name.startswith("__") and item.name.endswith("__")):
                    continue

                # FIXME - remove
                #print(item.get_line_code())
                #if "=" not in item.get_line_code():
                #    continue

                #if item.docstring() in item.get_line_code():
                #    print("NO DOCSTRING FOR: %s. Skipping!" % item.name)
                #    cnt += 1
                #    continue

                curfunction = {
                    "name": item.name,
                    "description": "HEY",  # FIXME - placeholder description
                }

                params = []
                curreturn = {}

                # The first docstring line holds the signature, e.g. "create_case(case, title=None)"
                function = item.docstring().split("\n")[0]
                for line in item.docstring().split("\n"):
                    if not line:
                        continue

                    linesplit = line.split(" ")
                    try:
                        curname = linesplit[1][:-1]
                    except IndexError as e:
                        print("IndexError: %s. Line: %s" % (e, line))
                        continue

                    # Check whether this parameter name has already been seen
                    paramfound = False
                    foundindex = 0
                    cnt = 0
                    for param in params:
                        #print(param["name"], curname)
                        if param["name"] == curname:
                            #print("ALREADY EXISTS: %s" % curname)
                            paramfound = True
                            foundindex = cnt
                            break

                        cnt += 1

                    # No good reST docstring parser was found, so this is naive
                    # ":param"/":type"/":rtype" handling. ":return" lines are skipped.
                    if line.startswith(":param"):
                        if not paramfound:
                            #print("HERE!: %s" % line)
                            curparam = {}
                            #print(line)
                            curparam["name"] = curname
                            curparam["description"] = " ".join(linesplit[2:])
                            #print(curparam["description"])
                            if "\r\n" in curparam["description"]:
                                curparam["description"] = " ".join(curparam["description"].split("\r\n"))
                            if "\n" in curparam["description"]:
                                curparam["description"] = " ".join(curparam["description"].split("\n"))

                            curparam["function"] = function

                            #curparam["docstring"] = item.docstring()
                            params.append(curparam)
                    elif line.startswith(":type"):
                        if paramfound:
                            params[foundindex]["schema"] = {}
                            params[foundindex]["schema"]["type"] = " ".join(linesplit[2:])
                            #print(params)

                        #print(line)
                    elif line.startswith(":rtype"):
                        curreturn["type"] = " ".join(linesplit[1:])

                # Check whether each param is required, based on the signature fields
                # FIXME - remove
                #if len(params) != 0:
                #    print(params)
                #    continue

                #print(function)
                #print(params)

                # FIXME - this might crash when the docstring is missing
                # FIXME - also a naive split (signatures can be written without ", ")
                # This should maybe be done first?
                fields = function.split("(")[1][:-1].split(", ")
                if len(params) == 0:
                    # Handle missing docstrings by falling back to the signature fields
                    params = []
                    for fieldname in fields:
                        params.append({
                            "name": fieldname,
                            "description": "",
                            "schema": {},
                            "function": function,
                        })

                cnt = 0
                for param in params:
                    found = False

                    for field in fields:
                        if param["name"] in field:
                            if "=" in field:
                                param["required"] = False
                                param["default_value"] = field
                            else:
                                param["required"] = True

                            found = True
                            break

                    if not param.get("schema"):
                        #print("Defining object schema for %s" % param["name"])
                        param["schema"] = {}
                        param["schema"]["type"] = "object"

                    param["position"] = cnt

                    if not found:
                        # FIXME - what to do here?
                        pass
                        #print("HANDLE NOT FOUND")
                        #print(param)
                        #print(fields)

                    cnt += 1

                if len(params) > 0:
                    curfunction["parameters"] = params

                # FIXME - curreturn is collected above but not attached to curfunction yet
                if not curfunction.get("returns"):
                    curfunction["returns"] = {}
                    curfunction["returns"]["schema"] = {}
                    curfunction["returns"]["schema"]["type"] = "object"

                #print(curfunction)
                try:
                    print("Finished prepping %s with %d parameters and return %s" % (item.name, len(curfunction["parameters"]), curfunction["returns"]["schema"]["type"]))
                except KeyError as e:
                    print("Error: %s" % e)
                    #print("Finished prepping %s with 0 parameters and return %s" % (item.name, curfunction["returns"]["schema"]["type"]))
                    curfunction["parameters"] = []
                except AttributeError as e:
                    pass

                try:
                    data["actions"].append(curfunction)
                except KeyError:
                    data["actions"] = []
                    data["actions"].append(curfunction)

                #return data

                # FIXME
                #if cnt == breakcnt:
                #    break

                #cnt += 1

                # This was used to read the actual source code of the function
                #functioncode = item.get_line_code(after=prevnumber-item.line-1)
                #prevnumber = item.line

                # break

    return data

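# Illustrative sketch only - this helper is not called by the generator. It shows the
# reStructuredText docstring style that the ":param"/":type" parsing in loop_modules()
# above expects. The function, parameter and type names here are made up for illustration.
def _example_docstring_style(case, title=None):
    """
    :param case: the case object to create
    :type case: CaseHelper
    :param title: optional title override
    :type title: str
    :rtype: requests.Response
    """
    return None

# For a docstring like the one above, the parser would produce roughly:
#   {"name": "case", "description": "the case object to create",
#    "schema": {"type": "CaseHelper"}, "required": True, "position": 0}
#   {"name": "title", "description": "optional title override",
#    "schema": {"type": "str"}, "required": False,
#    "default_value": "title=None", "position": 1}
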
# Generates the base information necessary to make an api.yaml file
def generate_base_yaml(filename, version, appname):
    # FIXME - the filename argument is currently unused
    print("Generating base app for library %s with version %s" % (appname, version))
    data = {
        "walkoff_version": "0.0.1",
        "app_version": version,
        "name": appname,
        "description": "Autogenerated yaml with @Frikkylikeme's generator",
        "contact_info": {
            "name": "@frikkylikeme",
            "url": "https://github.com/frikky",
        }
    }

    return data

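# Example (a sketch): generate_base_yaml("api.yaml", "1.0.0", "thehive") returns a dict
# that dump_yaml() below writes out roughly as (PyYAML sorts keys alphabetically by default):
#
#   app_version: 1.0.0
#   contact_info:
#     name: '@frikkylikeme'
#     url: https://github.com/frikky
#   description: Autogenerated yaml with @Frikkylikeme's generator
#   name: thehive
#   walkoff_version: 0.0.1
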
def generate_app(filepath, data):
    # Keys used only during generation; removed from the yaml data before dumping
    tbd = [
        "library_path",
        "import_class",
        "required_init"
    ]

    # FIXME - add to data dynamically and remove
    data["library_path"] = "thehive4py.api"
    data["import_class"] = "TheHiveApi"
    data["required_init"] = {"url": "http://localhost:9000", "principal": "asd"}

    # Build the constructor call for the wrapped class
    # FIXME - only works for strings currently
    wrapperstring = ""
    for key, value in data["required_init"].items():
        wrapperstring += "%s=\"%s\", " % (key, value)

    wrapperstring = wrapperstring[:-2]
    wrapper = "self.wrapper = %s(%s)" % (data["import_class"], wrapperstring)

    name = data["name"]
    if ":" in data["name"]:
        name = data["name"].split(":")[0]

    if not data.get("actions"):
        print("No actions found for %s in path %s" % (entrypoint_directory, data["library_path"]))
        print("The folder might be missing or unexported (__init__.py), the library might not be installed (pip), or no usable library actions were found.")
        exit()

    functions = []
    for action in data["actions"]:
        # paramstring is used in the generated function's signature,
        # internalparamstring in the call to the wrapped library function.
        internalparamstring = ""
        paramstring = ""
        try:
            for param in action["parameters"]:
                if param["required"] == False:
                    # The signature gets the default, the call passes the variable through
                    paramstring += "%s, " % (param["default_value"])
                    internalparamstring += "%s=%s, " % (param["name"], param["name"])
                else:
                    paramstring += "%s, " % param["name"]
                    internalparamstring += "%s, " % param["name"]
        except KeyError:
            action["parameters"] = []

        paramstring = paramstring[:-2]
        internalparamstring = internalparamstring[:-2]

        # FIXME - "self" is not added to the generated signature explicitly
        functionstring = '''    async def %s(%s):
        return self.wrapper.%s(%s)
''' % (action["name"], paramstring, action["name"], internalparamstring)

        functions.append(functionstring)

    filedata = '''from walkoff_app_sdk.app_base import AppBase
import asyncio

from %s import %s

class %sWrapper(AppBase):

    __version__ = "%s"
    app_name = "%s"

    def __init__(self, redis, logger, console_logger=None):
        """
        Each app should have this __init__ to set up Redis and logging.
        :param redis:
        :param logger:
        :param console_logger:
        """

        super().__init__(redis, logger, console_logger)
        %s

%s

if __name__ == "__main__":
    asyncio.run(%sWrapper.run(), debug=True)
''' % ( \
        data["library_path"],
        data["import_class"],
        name,
        data["app_version"],
        name,
        wrapper,
        "\n".join(functions),
        name
    )

    # Simple key cleanup
    for item in tbd:
        try:
            del data[item]
        except KeyError:
            pass

    tbd_action = []

    tbd_param = [
        "position",
        "default_value",
        "function"
    ]

    for action in data["actions"]:
        for param in action["parameters"]:
            for item in tbd_param:
                try:
                    del param[item]
                except KeyError:
                    pass

        for item in tbd_action:
            try:
                del action[item]
            except KeyError:
                pass

    # FIXME - add how to initialize the class
    with open(filepath, "w") as tmp:
        tmp.write(filedata)

    return data

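# Illustrative sketch: for an action named "create_case" with a required "case"
# parameter and an optional "title=None" parameter, generate_app() above emits a
# wrapper method roughly like:
#
#     async def create_case(case, title=None):
#         return self.wrapper.create_case(case, title=title)
#
# (the action and parameter names are examples only; real ones come from the
# wrapped library's docstrings)
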
def dump_yaml(filename, data):
    with open(filename, 'w') as outfile:
        yaml.dump(data, outfile, default_flow_style=False)

def build_base_structure(appname, version):
    outputdir = "generated"
    app_path = "%s/%s" % (outputdir, appname)
    filepath = "%s/%s" % (app_path, version)
    srcdir_path = "%s/src" % (filepath)

    directories = [
        outputdir,
        app_path,
        filepath,
        srcdir_path
    ]

    for directory in directories:
        try:
            os.mkdir(directory)
        except FileExistsError:
            print("%s already exists. Skipping." % directory)

    # "docker-compose.yml",
    # "env.txt",
    filenames = [
        "Dockerfile",
        "requirements.txt"
    ]

    #if strings.
    #    include_requirements = False

    for filename in filenames:
        shutil.copyfile("baseline/%s" % filename, "%s/%s" % (filepath, filename))
        print("Copied baseline/%s." % filename)

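# Resulting layout (a sketch) for e.g. appname="thehive", version="0.0.1":
#
#   generated/
#     thehive/
#       0.0.1/
#         Dockerfile          (copied from baseline/)
#         requirements.txt    (copied from baseline/)
#         api.yaml            (written later by dump_yaml)
#         src/
#           app.py            (written later by generate_app)
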
def move_files(appname, version):
    applocation = "../../functions/apps/%s" % appname
    if not os.path.exists("../../functions/apps"):
        os.mkdir("../../functions/apps")

    if not os.path.exists(applocation):
        os.mkdir(applocation)

    versionlocation = "%s/%s" % (applocation, version)
    if not os.path.exists(versionlocation):
        os.mkdir(versionlocation)

    # Remove any previous version before moving the freshly generated one in
    shutil.rmtree(versionlocation)
    shutil.move("generated/%s/%s" % (appname, version), versionlocation)

    print("\nMoved files to %s" % versionlocation)

def main():
    appname = entrypoint_directory
    version = "0.0.1"
    output_path = "generated/%s/%s" % (appname, version)
    api_yaml_path = "%s/api.yaml" % (output_path)
    app_python_path = "%s/src/app.py" % (output_path)

    # Builds the directory structure for the app
    build_base_structure(appname, version)

    # Generates the yaml based on the input library
    data = generate_base_yaml(api_yaml_path, version, appname)
    modules = get_modules()
    data = loop_modules(modules, data)

    # Generates the app file (src/app.py)
    data = generate_app(app_python_path, data)

    # Dumps the yaml to the specified directory
    dump_yaml(api_yaml_path, data)

    # Move the files to the functions/apps repository
    move_files(appname, version)

if __name__ == "__main__":
    main()
2
shuffle/backend/app_gen/python-lib/requirements.txt
Normal file
@ -0,0 +1,2 @@
jedi
pyyaml