Skip to content

Commit

Permalink
Parents: parent (#190)
Browse files Browse the repository at this point in the history
feature: add support for loading parent's parent (and so on) and loading imports from multiple files

- can import files with relative name
- can import files with absolute name
- can import files recursively
- can import files with extension skipped
- can give position info for import
* Fix test cases, recursive test cases, import test cases
* Refactor: rename Allhttp to MultidefHttp
  • Loading branch information
cedric05 committed Oct 18, 2023
1 parent 2fdeb9c commit 3287f6d
Show file tree
Hide file tree
Showing 17 changed files with 685 additions and 467 deletions.
2 changes: 2 additions & 0 deletions .github/workflows/python-publish.yml
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,8 @@ jobs:
# unit tests need python-magic
# package in pypi wouldn't need python-magic
python -m pip install python-magic
# for integration tests
docker run -d -p 8000:80 kennethreitz/httpbin
python -m unittest
- name: Install dependencies
run: |
Expand Down
2 changes: 1 addition & 1 deletion Dockerfile
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
FROM python:3.11
FROM python:3.8
LABEL io.whalebrew.config.networks '["host"]'
ADD requirements.txt /app/
WORKDIR /app
Expand Down
6 changes: 3 additions & 3 deletions dotextensions/server/handlers/basic_handlers.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@

from requests import RequestException

from dothttp import DotHttpException, Config, HttpDef, Allhttp, BaseModelProcessor, UndefinedHttpToExtend, js3py
from dothttp import DotHttpException, Config, HttpDef, MultidefHttp, BaseModelProcessor, UndefinedHttpToExtend, js3py
from dothttp.request_base import CurlCompiler, RequestCompiler, HttpFileFormatter, dothttp_model
from dothttp.__version__ import __version__
from dothttp.parse_models import ScriptType
Expand Down Expand Up @@ -146,7 +146,7 @@ def get_request_comp(self, config):

@staticmethod
def get_http_from_req(request: HttpDef):
http_def = Allhttp([request.get_http_from_req()])
http_def = MultidefHttp(import_list=[],allhttps=[request.get_http_from_req()])
return HttpFileFormatter.format(http_def)


Expand Down Expand Up @@ -183,7 +183,7 @@ def select_target(self):
for context in self.args.contexts:
try:
# if model is generated, try to figure out target
model: Allhttp = dothttp_model.model_from_str(context)
model: MultidefHttp = dothttp_model.model_from_str(context)
# by including targets in to model
self.model.allhttps = self.model.allhttps + model.allhttps
self.content += context + "\n\n" + context
Expand Down
12 changes: 8 additions & 4 deletions dotextensions/server/handlers/gohandler.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
import os
from typing import Union, Any

from dothttp import Allhttp, BaseModelProcessor, Http
from dothttp import MultidefHttp, BaseModelProcessor, Http
from dothttp.request_base import dothttp_model
from ..models import Command, Result, BaseHandler, DothttpTypes

Expand Down Expand Up @@ -33,17 +33,21 @@ def run(self, command: Command) -> Result:
return Result(id=command.id,
result={"error_message": f"content should be string", "error": True})
if filename:
model: Allhttp = dothttp_model.model_from_file(filename)
model: MultidefHttp = dothttp_model.model_from_file(filename)
else:
model: Allhttp = dothttp_model.model_from_str(content)
model: MultidefHttp = dothttp_model.model_from_str(content)
try:
return Result(id=command.id, result=self.figure_n_get(model, position))
except Exception as e:
return Result(id=command.id, result={"error_message": f"unknown Exception {e}", "error": True})

def figure_n_get(self, model: Allhttp, position: int) -> dict:
def figure_n_get(self, model: MultidefHttp, position: int) -> dict:
if self.is_in_between(model, position):
index = 0
if model.import_list:
for index, import_file in enumerate(model.import_list.filename):
if self.is_in_between(import_file, position):
return {"type": DothttpTypes.IMPORT.value, "filename": import_file.value}
for index, pick_http in enumerate(model.allhttps):
if self.is_in_between(pick_http, position):
if dot_type := self.pick_in_http(pick_http, position):
Expand Down
4 changes: 2 additions & 2 deletions dotextensions/server/handlers/har2httphandler.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@

import requests

from dothttp import HttpDef, APPLICATION_JSON, FORM_URLENCODED, Allhttp, Payload, Http, MULTIPART_FORM_INPUT
from dothttp import HttpDef, APPLICATION_JSON, FORM_URLENCODED, MultidefHttp, Payload, Http, MULTIPART_FORM_INPUT
from dothttp.parse_models import HttpFileType
from dothttp.request_base import HttpFileFormatter
from . import logger
Expand Down Expand Up @@ -133,7 +133,7 @@ def run(self, command: Command) -> Result:
save_filename = get_alternate_filename(save_filename)
save_filename.parent.mkdir(parents=True, exist_ok=True)
with open(save_filename, 'w') as f:
output = HttpFileFormatter.format(Allhttp(allhttps=http_list), filetype=filetype)
output = HttpFileFormatter.format(MultidefHttp(allhttps=http_list, import_list=[]), filetype=filetype)
f.write(output)
return Result.get_result(command, {"http": output, "filename": str(save_filename)})
# return Result.to_error(command, "har file has not requests")
6 changes: 3 additions & 3 deletions dotextensions/server/handlers/http2postman.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,8 +24,8 @@
except:
import json

try:
from requests_hawk import HawkAuth as RequestsHawkAuth
try:
from requests_hawk import HawkAuth as RequestsHawkAuth
except:
RequestsHawkAuth = None

Expand Down Expand Up @@ -234,7 +234,7 @@ def get_http_to_postman_request(http: HttpDef, description="") -> RequestClass:
key="hawkId",
value=hawk_id,
type="string"
),
),
ApikeyElement(key="authKey", value=hawk_key, type="string"),
ApikeyElement(key="algorithm", value=hawk_algorithm, type="string")]
elif isinstance(auth, AWS4Auth):
Expand Down
4 changes: 2 additions & 2 deletions dotextensions/server/handlers/postman2http.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@

import requests

from dothttp import Allhttp, Http, NameWrap, UrlWrap, Line, Query, Header, AuthWrap, BasicAuth, DigestAuth, \
from dothttp import MultidefHttp, Http, NameWrap, UrlWrap, Line, Query, Header, AuthWrap, BasicAuth, DigestAuth, \
MultiPartFile, FilesWrap, TripleOrDouble, Certificate
from dothttp.parse_models import NtlmAuthWrap, Payload, AwsAuthWrap, HttpFileType, HawkAuth
from dothttp.request_base import HttpFileFormatter
Expand Down Expand Up @@ -36,7 +36,7 @@ def get_method(self):
def import_requests_into_dire(items: Iterator[Items], directory: Path, auth: Optional[Auth],
variable: Union[None, List[Variable]], filetype: HttpFileType,
link: str):
collection = Allhttp(allhttps=[])
collection = MultidefHttp(import_list=[], allhttps=[])
base_auth_http = None
if auth:
base_inherit_auth_wrap, lines = ImportPostmanCollection.get_auth_wrap(auth)
Expand Down
1 change: 1 addition & 0 deletions dotextensions/server/models/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -53,3 +53,4 @@ class DothttpTypes(Enum):
OUTPUT = "output"
SCRIPT = "script"
COMMENT = "comment"
IMPORT = "import"
120 changes: 86 additions & 34 deletions dothttp/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,8 +14,8 @@
from requests.auth import HTTPBasicAuth, HTTPDigestAuth, AuthBase
from requests.structures import CaseInsensitiveDict

try:
from requests_hawk import HawkAuth as RequestsHawkAuth
try:
from requests_hawk import HawkAuth as RequestsHawkAuth
except:
RequestsHawkAuth = None

Expand All @@ -42,7 +42,7 @@

from .dsl_jsonparser import json_or_array_to_json
from .exceptions import *
from .parse_models import Allhttp, AuthWrap, DigestAuth, BasicAuth, Line, NtlmAuthWrap, Query, Http, NameWrap, UrlWrap, Header, \
from .parse_models import MultidefHttp, AuthWrap, DigestAuth, BasicAuth, Line, NtlmAuthWrap, Query, Http, NameWrap, UrlWrap, Header, \
MultiPartFile, FilesWrap, TripleOrDouble, Payload as ParsePayload, Certificate, P12Certificate, ExtraArg, \
AWS_REGION_LIST, AWS_SERVICES_LIST, AwsAuthWrap, TestScript, ScriptType, HawkAuth
from .property_schema import property_schema
Expand Down Expand Up @@ -425,6 +425,7 @@ def __init__(self, args: Config):
def load(self):
self.load_content()
self.load_model()
self.load_imports()
self.load_properties_n_headers()
self.load_command_line_props()
self.validate_names()
Expand Down Expand Up @@ -453,7 +454,38 @@ def load_model(self):
raise HttpFileSyntaxException(file=self.file, message=e.args)
except Exception as e:
raise HttpFileException(message=e.args)
self.model: Allhttp = model
self.model: MultidefHttp = model


def load_imports(self):
    """Resolve this model's `import` statements and fold the imported
    http definitions into the current model."""
    imported_https = []
    BaseModelProcessor._load_imports(self.model, self.file, self.property_util, imported_https)
    # extend in place so any existing references to the list stay valid
    self.model.allhttps += imported_https

@staticmethod
def _load_imports(model: MultidefHttp, filename: str, property_util: PropertyProvider, import_list: List[Http]):
    """Recursively resolve the `import` statements of *model*.

    Each imported file's http definitions are appended to *import_list*
    (mutated in place) and its in-file properties are registered on
    *property_util*. Imports nested inside imported files are resolved
    recursively.

    :param model: parsed model whose ``import_list`` is to be resolved
    :param filename: path of the file *model* was parsed from; relative
        imports are resolved against its directory
    :param property_util: property provider collecting in-file properties
    :param import_list: accumulator for all imported ``Http`` definitions
    :raises HttpFileException: when an import target does not exist or
        cannot be loaded
    :raises HttpFileSyntaxException: when an imported file fails to parse

    NOTE(review): circular imports are not detected and would recurse
    until the interpreter's recursion limit — consider tracking visited
    files if that becomes a real scenario.
    """
    if not model.import_list:
        return
    for filename_string in model.import_list.filename:
        import_file = filename_string.value
        if not os.path.isabs(import_file):
            # relative imports are resolved against the importing file's directory
            import_file = os.path.join(os.path.dirname(os.path.realpath(filename)), import_file)
        if not os.path.isfile(import_file):
            # the ".http" extension may be omitted in the import statement
            if os.path.isfile(import_file + '.http'):
                import_file += '.http'
            else:
                raise HttpFileException(message=f"import file should be a file, current: {import_file}")
        with open(import_file, 'r', encoding="utf-8") as f:
            imported_content = f.read()
        try:
            imported_model = dothttp_model.model_from_str(imported_content)
            import_list += imported_model.allhttps
            property_util.add_infile_properties(imported_content)
            BaseModelProcessor._load_imports(imported_model, import_file, property_util, import_list)
        except TextXSyntaxError as e:
            raise HttpFileSyntaxException(file=import_file, message=e.args)
        except DotHttpException:
            # errors raised while processing a nested import already carry
            # the offending file's details — propagate them unchanged
            # instead of re-wrapping and losing that context
            raise
        except Exception as e:
            raise HttpFileException(message=e.args)

def load_content(self):
if not os.path.exists(self.file):
Expand All @@ -472,16 +504,21 @@ def select_target(self):
self.http = self.get_target(target, self.model.allhttps)
else:
self.http = self.model.allhttps[0]
self.base_http = None
self.parents_http = []
if self.http.namewrap and self.http.namewrap.base:
base = self.http.namewrap.base
if base == self.http.namewrap.name:
parent = self.http.namewrap.base
if parent == self.http.namewrap.name:
raise ParameterException(message="target and base should not be equal", key=target,
value=base)
value=parent)
try:
self.base_http = self.get_target(base, self.model.allhttps)
while parent:
if parent in self.parents_http:
raise ParameterException(message="Found circular reference", target=self.http.namewrap.name)
grand_http = self.get_target(parent, self.model.allhttps)
self.parents_http.append(grand_http)
parent = grand_http.namewrap.base
except Exception:
raise UndefinedHttpToExtend(target=self.http.namewrap.name, base=base)
raise UndefinedHttpToExtend(target=self.http.namewrap.name, base=parent)

@staticmethod
def get_target(target: Union[str, int], http_def_list: List[Http]):
Expand Down Expand Up @@ -514,7 +551,10 @@ def validate_names(self):
names.append(str(index + 1))

def load_props_needed_for_content(self):
self.property_util.add_infile_properties(self.content)
self._load_props_from_content(self.content, self.property_util)

def _load_props_from_content(self, content, property_util: PropertyProvider):
property_util.add_infile_properties(content)


class HttpDefBase(BaseModelProcessor):
Expand All @@ -525,6 +565,10 @@ def __init__(self, args: Config):

def load_query(self):
params: DefaultDict[List] = defaultdict(list)
for parent in self.parents_http:
for line in parent.lines:
if query := line.query:
params[self.get_updated_content(query.key)].append(self.get_updated_content(query.value))
for line in self.http.lines:
if query := line.query:
params[self.get_updated_content(query.key)].append(self.get_updated_content(query.value))
Expand All @@ -550,7 +594,8 @@ def load_headers(self):
## having duplicate headers creates problem while exporting to curl,postman import..
headers = CaseInsensitiveDict()
headers.update(self.default_headers)
self.load_headers_to_dict(self.base_http, headers)
for parent in self.parents_http:
self.load_headers_to_dict(parent, headers)
self.load_headers_to_dict(self.http, headers)
request_logger.debug(
f'computed query params from `{self.file}` are `{headers}`')
Expand Down Expand Up @@ -581,9 +626,11 @@ def load_extra_flags(self):
# flags are extendable
# once its marked as allow insecure
# user would want all child to have same effect
extra_args = self.http.extra_args
if self.base_http and self.base_http.extra_args:
extra_args += self.base_http.extra_args
extra_args = list(self.http.extra_args)
if self.parents_http:
for parent in self.parents_http:
if parent.extra_args:
extra_args += parent.extra_args
if extra_args:
for flag in extra_args:
if flag.clear:
Expand All @@ -595,21 +642,24 @@ def load_url(self):
request_logger.debug(
f'url is {self.http.urlwrap.url}')
url_path = self.get_updated_content(self.http.urlwrap.url)
if base_http := self.base_http:
base_url = self.get_updated_content(base_http.urlwrap.url)
if not url_path:
self.httpdef.url = base_url
elif url_path.startswith("http://") or url_path.startswith("https://") or url_path.startswith(
"http+unix://"):
self.httpdef.url = url_path
elif base_url.endswith("/") and url_path.startswith("/"):
self.httpdef.url = urljoin(base_url, url_path[1:])
elif url_path.startswith("/"):
self.httpdef.url = urljoin(base_url + "/", url_path[1:])
elif not base_url.endswith("/") and not url_path.startswith("/"):
self.httpdef.url = urljoin(base_url + "/", url_path)
else:
self.httpdef.url = urljoin(base_url, url_path)
if self.parents_http:
for base_http in self.parents_http:
base_url = self.get_updated_content(base_http.urlwrap.url)
if not url_path:
url = base_url
elif url_path.startswith("http://") or url_path.startswith("https://") or url_path.startswith(
"http+unix://"):
url = url_path
elif base_url.endswith("/") and url_path.startswith("/"):
url = urljoin(base_url, url_path[1:])
elif url_path.startswith("/"):
url = urljoin(base_url + "/", url_path[1:])
elif not base_url.endswith("/") and not url_path.startswith("/"):
url = urljoin(base_url + "/", url_path)
else:
url = urljoin(base_url, url_path)
url_path = url
self.httpdef.url = url_path
else:
self.httpdef.url = url_path
if self.httpdef.url and not (
Expand Down Expand Up @@ -751,15 +801,15 @@ def load_auth(self):
self.get_updated_content(
digest_auth.password))
elif ntlm_auth := auth_wrap.ntlm_auth:
self.httpdef.auth = HttpNtlmAuth(self.get_updated_content(ntlm_auth.username),
self.httpdef.auth = HttpNtlmAuth(self.get_updated_content(ntlm_auth.username),
self.get_updated_content(ntlm_auth.password))
elif hawk_auth := auth_wrap.hawk_auth:
if hawk_auth.algorithm:
algorithm = hawk_auth.algorithm
else:
algorithm = "sha256"
self.httpdef.auth = RequestsHawkAuth(
id=self.get_updated_content(hawk_auth.id),
id=self.get_updated_content(hawk_auth.id),
key=self.get_updated_content(hawk_auth.key),
algorithm=self.get_updated_content(algorithm))
elif aws_auth_wrap := auth_wrap.aws_auth:
Expand Down Expand Up @@ -859,8 +909,10 @@ def load_auth(self):
def get_current_or_base(self, attr_key) -> Any:
if getattr(self.http, attr_key):
return getattr(self.http, attr_key)
elif self.base_http:
return getattr(self.base_http, attr_key)
elif self.parents_http:
for parent in self.parents_http:
if getattr(parent, attr_key):
return getattr(parent, attr_key)

def load_def(self):
if self._loaded:
Expand Down
2 changes: 1 addition & 1 deletion dothttp/__version__.py
Original file line number Diff line number Diff line change
@@ -1 +1 @@
__version__ = '0.0.42.a10'
__version__ = '0.0.42a11'
6 changes: 4 additions & 2 deletions dothttp/http.tx
Original file line number Diff line number Diff line change
@@ -1,5 +1,7 @@
//HTTP: ram=HTTP2
MULTISET: allhttps=HTTP+;
MULTISET: (import_list=IMPORT)? allhttps=HTTP+;

IMPORT: ('import' filename=String ';')* ;

HTTP:
(
Expand Down Expand Up @@ -230,4 +232,4 @@ SCRIPT_LANGUAGE:

SLASH:
'\'
;
;
Loading

0 comments on commit 3287f6d

Please sign in to comment.