
future library ported to python3 (#2)
DustinKLo authored Oct 8, 2019
1 parent 3e9dbec commit 72a353c
Showing 4 changed files with 13 additions and 5 deletions.
7 changes: 5 additions & 2 deletions crawl_cals.py
@@ -3,12 +3,15 @@
Crawl calibration files, create and ingest calibration datasets.
"""

+from future import standard_library
+standard_library.install_aliases()
+from builtins import str
import os, sys, re, json, logging, traceback, requests, argparse, backoff, shutil
from datetime import datetime
from requests.packages.urllib3.exceptions import (InsecureRequestWarning,
                                                  InsecurePlatformWarning)
try: from html.parser import HTMLParser
-except: from HTMLParser import HTMLParser
+except: from html.parser import HTMLParser

from osaka.main import get, rmall

@@ -194,7 +197,7 @@ def create_active_cal_ds(active_ids, dataset_version, root_ds_dir="."):
ds_id = id
root_ds_dir = os.path.abspath(root_ds_dir)
ds_dir = os.path.join(root_ds_dir, ds_id)
-if not os.path.isdir(ds_dir): os.makedirs(ds_dir, 0755)
+if not os.path.isdir(ds_dir): os.makedirs(ds_dir, 0o755)

# dump dataset and met JSON
ds_file = os.path.join(ds_dir, "%s.dataset.json" % ds_id)
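Both crawler scripts gain the same three-line header from the python-future package. Roughly, `standard_library.install_aliases()` lets Python 3 stdlib module names resolve when the script still runs under Python 2, and `from builtins import str` provides a Python 3-style text `str` on both interpreters. A minimal sketch of the pattern, assuming the `future` package is installed and using a placeholder URL:

from future import standard_library
standard_library.install_aliases()      # alias renamed stdlib modules (urllib.*, queue, ...) for Python 2
from builtins import str                # Python 3 text semantics for str on both interpreters

from urllib.parse import urljoin        # resolvable under Python 2 once install_aliases() has run

base = "https://example.com/aux_cal/"   # placeholder URL, not taken from these scripts
print(urljoin(base, "?page=2"))         # https://example.com/aux_cal/?page=2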
5 changes: 4 additions & 1 deletion crawl_orbits.py
@@ -3,13 +3,16 @@
Crawl orbits and submit orbit dataset generation jobs.
"""

+from future import standard_library
+standard_library.install_aliases()
+from builtins import str
import os, sys, re, json, logging, traceback, requests, argparse, backoff
from datetime import datetime
from pprint import pformat
from requests.packages.urllib3.exceptions import (InsecureRequestWarning,
                                                  InsecurePlatformWarning)
try: from html.parser import HTMLParser
-except: from HTMLParser import HTMLParser
+except: from html.parser import HTMLParser

from hysds_commons.job_utils import submit_mozart_job
from hysds.celery import app
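The `HTMLParser` import guard in both crawlers changes as well: the Python 2 module name `HTMLParser` no longer exists, so the fallback branch now also points at `html.parser`. For reference, a small sketch of the usual two-branch compatibility idiom, with a hypothetical subclass that collects link targets:

try:
    from html.parser import HTMLParser   # Python 3 location
except ImportError:
    from HTMLParser import HTMLParser    # Python 2 location (removed in Python 3)

class LinkScraper(HTMLParser):
    """Hypothetical parser that collects href values from anchor tags."""
    def __init__(self):
        HTMLParser.__init__(self)
        self.links = []

    def handle_starttag(self, tag, attrs):
        if tag == "a":
            self.links.extend(value for name, value in attrs if name == "href")

scraper = LinkScraper()
scraper.feed('<a href="S1A_AUX_CAL.tar">cal</a>')
print(scraper.links)                     # ['S1A_AUX_CAL.tar']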
3 changes: 2 additions & 1 deletion create_cal_ds.py
@@ -3,6 +3,7 @@
Create a HySDS dataset from a Sentinel1 calibration tar file.
"""

+from builtins import str
import os, sys, time, re, json, requests, shutil, logging, traceback, argparse, backoff
from requests.packages.urllib3.exceptions import (InsecureRequestWarning,
InsecurePlatformWarning)
@@ -82,7 +83,7 @@ def create_dataset(ds, met, cal_tar_file, root_ds_dir="."):
id = met['data_product_name']
root_ds_dir = os.path.abspath(root_ds_dir)
ds_dir = os.path.join(root_ds_dir, id)
-if not os.path.isdir(ds_dir): os.makedirs(ds_dir, 0755)
+if not os.path.isdir(ds_dir): os.makedirs(ds_dir, 0o755)

# dump dataset and met JSON
ds_file = os.path.join(ds_dir, "%s.dataset.json" % id)
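The remaining change in the two dataset-creation scripts is the permission literal: the old `0755` octal form is a syntax error in Python 3, which accepts only `0o755` (also valid since Python 2.6), so the module would not even import before this fix. A quick sketch with a placeholder path:

import os
import stat

ds_dir = "/tmp/example_dataset"          # placeholder path, not from the scripts
if not os.path.isdir(ds_dir):
    os.makedirs(ds_dir, 0o755)           # rwxr-xr-x; writing 0755 here would not parse on Python 3

mode = stat.S_IMODE(os.stat(ds_dir).st_mode)
print(oct(mode))                         # typically 0o755, minus whatever the process umask clears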
3 changes: 2 additions & 1 deletion create_orbit_ds.py
@@ -3,6 +3,7 @@
Create a HySDS dataset from a Sentinel1 precise or restituted orbit.
"""

+from builtins import str
import os, sys, time, re, json, requests, shutil, logging, traceback, argparse
from datetime import datetime, timedelta

@@ -45,7 +46,7 @@ def create_dataset(ds, met, orbit_file, root_ds_dir="."):
id = met['data_product_name']
root_ds_dir = os.path.abspath(root_ds_dir)
ds_dir = os.path.join(root_ds_dir, id)
-if not os.path.isdir(ds_dir): os.makedirs(ds_dir, 0755)
+if not os.path.isdir(ds_dir): os.makedirs(ds_dir, 0o755)

# dump dataset and met JSON
ds_file = os.path.join(ds_dir, "%s.dataset.json" % id)
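create_cal_ds.py and create_orbit_ds.py only need the `builtins` shim for `str`; under Python 2 it supplies a unicode-backed text type with the Python 3 constructor, which matters wherever downloaded bytes are turned into dataset identifiers. A minimal sketch, assuming the `future` package supplies the `builtins` module and using a made-up product name:

from builtins import str                 # no-op on Python 3; unicode-backed str on Python 2
from datetime import datetime

raw = b"S1A_OPER_AUX_POEORB"             # hypothetical bytes, e.g. part of a downloaded orbit filename
product = str(raw, "utf-8")              # decodes to text (Python 3 behaviour, mirrored by the shim on Python 2)
stamp = "%sZ" % datetime(2019, 10, 8).isoformat()
print(product, stamp)                    # S1A_OPER_AUX_POEORB 2019-10-08T00:00:00Z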
