Merge branch 'three'
parent 443058d3b0
commit fab4344152
60 changed files with 1386 additions and 1366 deletions
.travis.yml (31 changed lines)

@@ -3,30 +3,29 @@ language: python
matrix:
include:
- python: "2.7"
env: TOXENV=lint-py27
- python: "3.9"
env: TOXENV=check,lint-py3
- python: "2.7"
env: TOXENV=py27
- python: "3.9"
- python: "3.10"
env: TOXENV=check,lint
# env: TOXENV=check,lint,mypy
- python: "3.10" # EOL 2026-10-04
env: TOXENV=py310
- python: "3.9" # EOL 2025-10-05
env: TOXENV=py39
- python: "3.8"
- python: "3.8" # EOL 2024-10-14
env: TOXENV=py38
- python: "3.7"
- python: "3.7" # EOL 2023-06-27
env: TOXENV=py37
dist: xenial
- python: "3.6"
- python: "3.6" # EOL 2021-12-21
env: TOXENV=py36
- python: "3.5"
env: TOXENV=py35
# - python: "3.5" # EOL 2020-09-13
# env: TOXENV=py35
# - python: "3.4" # EOL 2019-03-18
# env: TOXENV=py34
- python: "3.10-dev"
env: TOXENV=py310
- python: "3.11-dev"
env: TOXENV=py311
allow_failures:
- python: "3.10-dev"
env: TOXENV=py310
- python: "3.11-dev"
env: TOXENV=py311

before_install:
# See issue #80: litmus fails to build on travis
CHANGELOG.md (16 changed lines)

@@ -1,10 +1,22 @@
# Changelog

## 4.0.0 / Unreleased

- Drop Python 2 support
- Add uvicorn server support to CLI, drop flup and CherryPy
- Support LibreOffice in dir_browser
- DirBrowser supports `?davmount` URLs by default (option `dir_browser.davmount`).
  The new option `dir_browser.davmount_links` is false by default.
- Drop support for Microsoft Web Folders (option `dir_browser.ms_mount`).
- TODO: #201, #220

## 3.1.2 / Unreleased

## 3.1.1 / 2021-07-11

- #201 Check also HTTP_X_FORWARDED_HOST as alternative to DESTINATION header
- #216 Build with Python 3.8 (Py3.9 doesn't work on Windows7 / Windows Server 2008 R2 / Ealier)
- #220 Fix exception whenever user access unknown realm
- #216 Build with Python 3.8 (Py3.9 doesn't work on Windows7 / Windows Server 2008 R2 / earlier)
- #220 Fix exception whenever users access unknown realm
- #225 Include changelog in PyPI package

## 3.1.0 / 2021-01-04
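To make the new `dir_browser.davmount` / `dir_browser.davmount_links` defaults concrete, here is a minimal sketch of the options touched by this release, assuming the dict-style configuration shown in the README portion of this diff (values are the new 4.0 defaults):

```python
# Hedged sketch: dir_browser options changed in 4.0 (not a complete config).
config = {
    "dir_browser": {
        "davmount": True,         # honor '?davmount' URLs, send a <dm:mount> response
        "davmount_links": False,  # do not add a 'Mount' link to the listing by default
        # "ms_mount" is gone: Microsoft Web Folder links are no longer generated
        "libre_office_support": True,  # add LibreOffice edit links
    }
}
```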
Pipfile (8 changed lines)

@@ -4,19 +4,20 @@ verify_ssl = true
name = "pypi"

[dev-packages]
black = "==20.8b1"
black = "==21.9b0"
Cheroot = "*" # "~=8.5"
cx_Freeze = {version = "*", os_name = "== 'nt'"}
flake8 = "*" # "~=3.8"
flake8-bugbear = "*" # "~=20.1"
flake8-quotes = "*" # "~=1.0"
isort = "*"
mypy = "*"
Paste = "*" # "~=2.0"
pylint = "*"
pytest = "*" # "~=4.6"
pytest-cov = "*" # "~=2.10"
python-pam = "*" # "~=1.8"
pywin32 = "*"
pywin32 = {version = "*", os_name = "== 'nt'"}
recommonmark = "*"
requests = "*"
rope = "*"

@@ -36,7 +37,6 @@ safety = "*"
defusedxml = "*" # "~=0.5"
Jinja2 = "*" # "~=2.10"
PyYAML = "*" # "~=5.1"
six = "*" # "~=1.13"
json5 = "*"

[requires]

@@ -44,4 +44,4 @@ json5 = "*"
python_version = "3.8"

[pipenv]
allow_prereleases = true
# allow_prereleases = true
Pipfile.lock (generated, 967 changed lines)
File diff suppressed because it is too large.
@@ -1,5 +1,5 @@
#  WsgiDAV
[](https://travis-ci.org/mar10/wsgidav)
[](https://travis-ci.com/mar10/wsgidav)
[](https://pypi.python.org/pypi/WsgiDAV/)
[](https://github.com/mar10/wsgidav/blob/master/LICENSE)
[](http://wsgidav.readthedocs.io/)

@@ -150,10 +150,10 @@ dir_browser = {
"enable": True, # Render HTML listing for GET requests on collections
"ignore": [],
"response_trailer": "", # Raw HTML code, appended as footer
"davmount": False, # Send <dm:mount> response if request URL contains '?davmount'
"ms_mount": False, # Add an 'open as webfolder' link (requires Windows)
"ms_sharepoint_support": True, # Invoke MS Offce documents for editing using WebDAV
# "app_class": MyBrowser, # (DEPRECATED with 2.4.0) Used instead of WsgiDavDirBrowser
"davmount": True, # Send <dm:mount> response if request URL contains '?davmount'
"davmount_links": False, # Add 'davmount' link at the top of the listing
"ms_sharepoint_support": True, # Invoke MS Office documents for editing using WebDAV
"libre_office_support": True, # Invoke Libre Office documents for editing using WebDAV
}
@@ -24,7 +24,6 @@ Package ``wsgidav``
.. autosummary::
:toctree: _autosummary

wsgidav.compat
wsgidav.dav_error
wsgidav.dav_provider
wsgidav.debug_filter
@@ -19,6 +19,8 @@
# Disable Black formatting
# fmt: off

# type: ignore

import sys
import os

@@ -31,7 +33,7 @@ on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
# documentation root, use os.path.abspath to make it absolute, like shown here.
package_root = os.path.abspath('../..')
sys.path.insert(0, package_root)
print "Add package root to sys.path: %r" % package_root
print("Add package root to sys.path: %r" % package_root)
# for fn in os.listdir(package_root):
# print "-", fn

@@ -108,10 +110,10 @@ import pkg_resources
try:
release = pkg_resources.get_distribution('wsgidav').version
except pkg_resources.DistributionNotFound:
print 'To build the documentation, The distribution information'
print 'Has to be available. Either install the package into your'
print 'development environment or run "setup.py develop" to setup the'
print 'metadata. A virtualenv is recommended!'
print('To build the documentation, The distribution information')
print('Has to be available. Either install the package into your')
print('development environment or run "setup.py develop" to setup the')
print('metadata. A virtualenv is recommended!')
sys.exit(1)
del pkg_resources

@@ -129,7 +131,7 @@ version = '.'.join(release.split('.')[:2])
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
exclude_patterns = ['_build', 'build']

# The reST default role (used for this markup: `text`) to use for all
# documents.
@@ -30,7 +30,7 @@
"user_mapping": {
"*": {
"user1": {
"password": "abc123",
"password": "abc123"
}
},
"/share2": true // Allow anonymous access

@@ -39,8 +39,8 @@
"dir_browser": {
"enable": true,
"response_trailer": "",
"davmount": false,
"ms_mount": false,
"davmount": true,
"davmount_links": false,
"ms_sharepoint_support": true,
"htdocs_path": null
}
mypy.ini (new file, 26 lines)

@@ -0,0 +1,26 @@
# Global options:

[mypy]
warn_return_any = True
warn_unused_configs = True
# Ignore errors like
# `Cannot find implementation or library stub for module named 'lxml'`
# https://stackoverflow.com/a/57815124/19166
ignore_missing_imports = True

# Per-module options:

; [mypy-benchmarks]
; disallow_untyped_defs = True

; [mypy-lxml]
; ignore_missing_imports = True

; [mypy-wsgidav.xml_tools]
; warn_return_any = False

; [mypy-mycode.bar]
; warn_return_any = False

; [mypy-somelibrary]
; ignore_missing_imports = True
@@ -1,7 +1,6 @@
[tool.black]
line-length = 88
#py36 = false # don't strip 'u' from native strings
target-version = ['py27', 'py35', 'py36', 'py37', 'py38']
target-version = ['py36', 'py37', 'py38']
include = '\.pyi?$'
exclude = '''
/(
@@ -2,5 +2,4 @@ defusedxml~=0.5
Jinja2~=2.10
json5~=0.8.5
python-pam~=1.8
PyYAML~=5.1
six~=1.13
PyYAML~=6.0
@@ -202,11 +202,10 @@ dir_browser:
show_user: true
show_logout: true
#: Send <dm:mount> response if request URL contains '?davmount'
#: Also add a respective link at the top of the listing
#: (See https://tools.ietf.org/html/rfc4709)
davmount: false
#: Add an 'open as webfolder' link (requires Windows IE <= 7!)
ms_mount: false
davmount: true
#: Add a 'Mount' link at the top of the listing
davmount_links: false
#: Invoke MS Office documents for editing using WebDAV by adding a JavaScript
#: click handler.
#: - For IE 11 and below invokes the SharePoint ActiveXObject("SharePoint.OpenDocuments")

@@ -214,6 +213,9 @@ dir_browser:
#: https://docs.microsoft.com/en-us/previous-versions/office/developer/sharepoint-2010/ff407576(v%3Doffice.14)
#: - Otherwise the Office URL prefix is used (e.g. 'ms-word:ofe|u|http://server/path/file.docx')
ms_sharepoint_support: true
#:
libre_office_support: true
#:
#: The path to the directory that contains template.html and associated
#: assets.
#: The default is the htdocs directory within the dir_browser directory.
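The YAML comments above describe how the dir_browser turns `ms_sharepoint_support` / `libre_office_support` into click-to-edit URL prefixes. As a rough, simplified illustration of that logic (drawn from the MS_OFFICE_EXT_TO_TYPE_MAP handling in the dir_browser middleware further down in this diff; the helper name `pick_edit_prefix` is illustrative only, not part of WsgiDAV):

```python
# Hedged sketch: choose an "open for editing" URL prefix for a file link.
MS_OFFICE_TYPE_TO_EXT_MAP = {
    "excel": ("xls", "xlsx"),
    "word": ("doc", "docx"),
    "powerpoint": ("ppt", "pptx"),
}
MS_OFFICE_EXT_TO_TYPE_MAP = {
    ext: kind for kind, exts in MS_OFFICE_TYPE_TO_EXT_MAP.items() for ext in exts
}

def pick_edit_prefix(href, ms_sharepoint_support=True, libre_office_support=True):
    ext = href.rsplit(".", 1)[-1].lower()
    office_type = MS_OFFICE_EXT_TO_TYPE_MAP.get(ext)
    if office_type and ms_sharepoint_support:
        return "ms-{}:ofe|u|".format(office_type)   # e.g. 'ms-word:ofe|u|' + URL
    if libre_office_support:
        return "vnd.libreoffice.command:ofv|u|"     # LibreOffice URL scheme
    return ""

print(pick_edit_prefix("http://server/path/file.docx"))  # -> ms-word:ofe|u|
```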
setup.cfg (11 changed lines)

@@ -23,8 +23,8 @@ license = MIT
license_file = LICENSE
classifiers =
# Development Status :: 3 - Alpha
# Development Status :: 4 - Beta
Development Status :: 5 - Production/Stable
Development Status :: 4 - Beta
# Development Status :: 5 - Production/Stable
Environment :: Console
Intended Audience :: Developers
Intended Audience :: Information Technology

@@ -32,15 +32,13 @@ classifiers =
License :: OSI Approved :: MIT License
Operating System :: OS Independent
Programming Language :: Python
Programming Language :: Python :: 2
Programming Language :: Python :: 2.7
Programming Language :: Python :: 3
# Programming Language :: Python :: 3 :: Only
Programming Language :: Python :: 3.5
Programming Language :: Python :: 3 :: Only
Programming Language :: Python :: 3.6
Programming Language :: Python :: 3.7
Programming Language :: Python :: 3.8
Programming Language :: Python :: 3.9
Programming Language :: Python :: 3.10
Topic :: Internet :: WWW/HTTP
Topic :: Internet :: WWW/HTTP :: HTTP Servers
Topic :: Internet :: WWW/HTTP :: Dynamic Content

@@ -64,7 +62,6 @@ install_requires =
Jinja2
json5
PyYAML
six

# [options.package_data]
# * = *.txt, *.rst
@@ -69,7 +69,6 @@ install_requires = [
"jinja2", # NOTE: we must use lower-case name, otherwise import will fail
"json5",
"yaml", # NOTE: must import 'yaml' (but dependency is names 'PyYAML')
"six",
# Used by wsgidav.dc.nt_dc:
"win32net",
"win32netcon",

@@ -126,7 +125,7 @@ bdist_msi_options = {
setup(
name="WsgiDAV",
version=version,
author="Martin Wendt, Ho Chun Wei",
author="Martin Wendt",
author_email="wsgidav@wwwendt.de",
maintainer="Martin Wendt",
maintainer_email="wsgidav@wwwendt.de",
@@ -48,57 +48,26 @@ Test cases
subfolder10-10/
file10-10-1.txt -> 1k
"""
from __future__ import print_function

import datetime
import logging
import os
import platform
import subprocess
import sys

from tests.util import Timing, WsgiDavTestServer
from wsgidav import __version__
from wsgidav import __version__, util
from wsgidav.xml_tools import use_lxml

try:
# WsgiDAV 2.x
from wsgidav import compat
except ImportError:
# WsgiDAV 1.x: mock the compat module, so benchmark.py runs in both
# versions.
# Note that we only need to support Py2 for WsgiDAV 1.x
class compat(object):
xrange = xrange # noqa: F821
is_unicode = lambda s: isinstance(s, unicode) # noqa: E731, F821

@staticmethod
def to_bytes(s, encoding="utf8"):
"""Convert unicode (text strings) to binary data, i.e. str on Py2 and bytes on Py3."""
if type(s) is unicode: # noqa: F821
s = s.encode(encoding)
elif type(s) is not str:
s = str(s)
return s

from io import StringIO
except ImportError: # Py2
from cStringIO import StringIO # type: ignore

try:
try:
# from cherrypy import __version__ as cp_version
from cheroot import wsgi

from cheroot import wsgi
cp_version = wsgi.Server.version

cp_version = wsgi.Server.version

except ImportError:
# Bundled CherryPy wsgiserver in WsgDAV 1.x
server_folder = os.path.abspath(
os.path.join(os.path.dirname(__file__), "..", "wsgidav", "server")
)
sys.path.append(server_folder)
from cherrypy import wsgiserver

cp_version = wsgiserver.CherryPyWSGIServer.version
except ImportError:
cp_version = "unknown"
raise
@@ -145,37 +114,37 @@ def _bench_script(opts):
# Prepare big file with 10 MB
lines = []
line = "." * (1000 - 6 - len("\n"))
for i in compat.xrange(10 * 1000):
for i in range(10 * 1000):
lines.append("%04i: %s\n" % (i, line))
data_10m = "".join(lines)
data_10m = compat.to_bytes(data_10m)
data_10m = util.to_bytes(data_10m)

with Timing("Setup fixture"):
_setup_fixture(opts, client)

# PUT files
with Timing("1000 x PUT 1 kB", 1000, "{:>6.1f} req/sec", 1, "{:>7,.3f} MB/sec"):
for _ in compat.xrange(1000):
for _ in range(1000):
client.put("/test/file1.txt", data_1k)
client.check_response()

with Timing("10 x PUT 10 MB", 10, "{:>6.1f} req/sec", 100, "{:>7,.3f} MB/sec"):
for _ in compat.xrange(10):
for _ in range(10):
client.put("/test/bigfile.txt", data_10m)
client.check_response()

with Timing("1000 x GET 1 kB", 1000, "{:>6.1f} req/sec", 1, "{:>7,.3f} MB/sec"):
for _ in compat.xrange(1000):
body = client.get("/test/file1.txt")
for _ in range(1000):
_body = client.get("/test/file1.txt")
client.check_response()

with Timing("10 x GET 10 MB", 10, "{:>6.1f} req/sec", 100, "{:>7,.3f} MB/sec"):
for _ in compat.xrange(10):
body = client.get("/test/bigfile.txt") # noqa F841
for _ in range(10):
_body = client.get("/test/bigfile.txt") # noqa F841
client.check_response()

with Timing("10 x COPY 10 MB", 10, "{:>6.1f} req/sec", 100, "{:>7,.3f} MB/sec"):
for _ in compat.xrange(10):
for _ in range(10):
client.copy(
"/test/bigfile.txt",
"/test/bigfile-copy.txt",
@@ -186,14 +155,14 @@ def _bench_script(opts):
with Timing("100 x MOVE 10 MB", 100, "{:>6.1f} req/sec"):
name_from = "/test/bigfile-copy.txt"
for i in compat.xrange(100):
for i in range(100):
name_to = "/test/bigfile-copy-{}.txt".format(i)
client.move(name_from, name_to, depth="infinity", overwrite=True)
name_from = name_to
client.check_response()

with Timing("100 x LOCK/UNLOCK", 200, "{:>6.1f} req/sec"):
for _ in compat.xrange(100):
for _ in range(100):
locks = client.set_lock(
"/test/lock-0",
owner="test-bench",

@@ -206,7 +175,7 @@ def _bench_script(opts):
client.check_response()

with Timing("1000 x PROPPATCH", 1000, "{:>6.1f} req/sec"):
for _ in compat.xrange(1000):
for _ in range(1000):
client.proppatch(
"/test/file1.txt",
set_props=[("{testns:}testname", "testval")],

@@ -215,7 +184,7 @@ def _bench_script(opts):
client.check_response()

with Timing("500 x PROPFIND", 500, "{:>6.1f} req/sec"):
for _ in compat.xrange(500):
for _ in range(500):
client.propfind(
"/", properties="allprop", namespace="DAV:", depth=None, headers=None
)
@@ -239,9 +208,9 @@ def run_benchmarks(opts):
print("OS: {}".format(platform.platform(aliased=True)))

if use_lxml:
from lxml.etree import LXML_VERSION as lxml_version
import lxml.etree

print("lxml: {}".format(lxml_version))
print("lxml: {}".format(lxml.etree.LXML_VERSION))
else:
print("lxml: (not installed)")

@@ -263,9 +232,9 @@ def run_benchmarks(opts):
prof = cProfile.Profile()
prof = prof.runctx("_runner(opts)", globals(), locals())
stream = compat.StringIO()
stream = StringIO()
stats = pstats.Stats(prof, stream=stream)
# stats.sort_stats("time") # Or cumulative
# stats.sort_stats("time") # Or cumulative
stats.sort_stats("cumulative") # Or time
stats.print_stats(20) # 80 = how many to print
# The rest is optional.
@@ -26,44 +26,47 @@
# - Use requests instead of http.client / httplib

import copy
import sys
from base64 import encodebytes as base64_encodebytes
from io import BytesIO
from urllib.parse import urljoin, urlparse
from xml.etree import ElementTree

import requests

PY2 = sys.version_info < (3, 0)

if PY2:
from base64 import encodestring as base64_encodebytes

from cStringIO import StringIO

BytesIO = StringIO
from urlparse import urljoin, urlparse

is_bytes = lambda s: isinstance(s, str) # noqa: E731
is_unicode = lambda s: isinstance(s, unicode) # noqa: E731, F821
to_native = lambda s: s if is_bytes(s) else s.encode("utf8") # noqa: E731
else:
from base64 import encodebytes as base64_encodebytes
from io import BytesIO, StringIO
from urllib.parse import urljoin, urlparse

xrange = range
is_bytes = lambda s: isinstance(s, bytes) # noqa: E731
is_unicode = lambda s: isinstance(s, str) # noqa: E731
to_native = lambda s: s if is_unicode(s) else s.decode("utf8") # noqa: E731

is_native = lambda s: isinstance(s, str) # noqa: E731
to_bytes = lambda s: s if is_bytes(s) else s.encode("utf8") # noqa: E731

try:
from xml.etree import ElementTree
except Exception:
from elementtree import ElementTree

__all__ = ["DAVClient"]


def is_basestring(s):
"""Return True for any string type (for str/unicode on Py2 and bytes/str on Py3)."""
return isinstance(s, (str, bytes))


def is_bytes(s):
"""Return True for bytestrings (for str on Py2 and bytes on Py3)."""
return isinstance(s, bytes)


def is_str(s):
"""Return True for native strings (for str on Py2 and Py3)."""
return isinstance(s, str)


def to_bytes(s, encoding="utf8"):
"""Convert a text string (unicode) to bytestring (str on Py2 and bytes on Py3)."""
if type(s) is not bytes:
s = bytes(s, encoding)
return s


def to_str(s, encoding="utf8"):
"""Convert data to native str type (bytestring on Py2 and unicode on Py3)."""
if type(s) is bytes:
s = str(s, encoding)
elif type(s) is not str:
s = str(s)
return s


class AppError(Exception):
pass
@@ -319,7 +322,7 @@ class DAVClient(object):
"""Property find. If properties arg is unspecified it defaults to 'allprop'."""
# Build propfind xml
root = ElementTree.Element("{DAV:}propfind")
if is_native(properties):
if is_str(properties):
ElementTree.SubElement(root, "{DAV:}%s" % properties)
else:
props = ElementTree.SubElement(root, "{DAV:}prop")

@@ -453,7 +456,7 @@ class DAVClient(object):
Inspired by paste.fixture
"""
__tracebackhide__ = True
__tracebackhide__ = True # pylint: disable=unused-variable
res = self.response
full_status = "%s %s" % (res.status_code, res.reason)
@@ -9,6 +9,17 @@ We use it to test stressor against a locally running WsgiDAV server:
3. Open a second terminal and run
$ stressor run tests/stressor/test_rw.yaml -q
```
## 2021-11-09
> Seems that stressor is the limiting factor
(MacBook, i5 2,9GHz, macOs 12.0.1, Py3.9)
- Cheroot 8.5.2 Executed 9,700 activities
- gevent 21.8.0 Executed 9,704 activities
- gunicorn 20.1.0 Executed 9,324 activities
- uvicorn 0.15.0 Executed 8,036 activities
- paste 0.5 Executed 9,756 activities
- wsgiref 0.2 Executed 8,188 activities ERRORS: 27 (NewConnectionError)
- ext_wsgiutils Executed 9,668 activities

## 2021-01-04
(PC, Windows 10)
- Cheroot 8.5.1 Executed 16,660 activities
@@ -14,6 +14,7 @@ config:
$ stressor run tests/stressor/test_rw -q
verbose: 3
base_url: http://127.0.0.1:8082
# base_url: https://127.0.0.1:8082
request_timeout: 1

# Initial context value definitions.

@@ -26,6 +27,7 @@ sessions:
users: $load(users.yaml)
count: 10
# basic_auth: true
verify_ssl: false

# Define what actions should be performed by every session
scenario:
@@ -23,7 +23,7 @@ def setUpModule():

def tearDownModule():
global _test_server
# global _test_server

if _test_server:
_test_server.stop()
@@ -5,8 +5,6 @@
"""
Run litmus against WsgiDAV server.
"""
from __future__ import print_function

import subprocess
import unittest
@@ -10,8 +10,6 @@
See http://chandlerproject.org/Projects/Davclient
http://svn.osafoundation.org/tools/davclient/trunk/src/davclient/davclient.py
"""
from __future__ import print_function

import os
import time
import unittest

@@ -20,7 +18,7 @@ from threading import Thread
from tests import davclient
from tests.util import WsgiDavTestServer
from wsgidav import compat
from wsgidav import util
from wsgidav.fs_dav_provider import FilesystemProvider
from wsgidav.server.ext_wsgiutils_server import ExtServer
from wsgidav.wsgidav_app import WsgiDAVApp

@@ -218,10 +216,10 @@ class ServerTest(unittest.TestCase):
# Big file with 10 MB
lines = []
line = "." * (1000 - 6 - len("\n"))
for i in compat.xrange(10 * 1000):
for i in range(10 * 1000):
lines.append("%04i: %s\n" % (i, line))
data3 = "".join(lines)
data3 = compat.to_bytes(data3)
data3 = util.to_bytes(data3)

# Cleanup
client.delete("/test/")

@@ -327,7 +325,7 @@ class ServerTest(unittest.TestCase):
#
# # Request must not contain a body (expect '415 Media Type Not Supported')
# app.get("/file1.txt",
# headers={"Content-Length": compat.to_native(len(data1))},
# headers={"Content-Length": util.to_str(len(data1))},
# params=data1,
# status=415)
#
@@ -15,7 +15,7 @@ from tempfile import gettempdir
import requests

from tests.util import Timing, WsgiDavTestServer, write_test_file
from wsgidav import compat
from wsgidav import util
from wsgidav.dav_provider import DAVNonCollection, DAVProvider
from wsgidav.stream_tools import FileLikeQueue

@@ -53,7 +53,7 @@ class MockProxyResource(DAVNonCollection):
while data:
s += len(data)
# print("_consumer: read(): write")
f.write(compat.to_bytes(data))
f.write(util.to_bytes(data))
data = queue.read()
# print("_consumer(): done", s)
@@ -3,13 +3,12 @@
# Licensed under the MIT license:
# http://www.opensource.org/licenses/mit-license.php
"""Unit tests for wsgidav.util"""
from __future__ import print_function

import logging
import logging.handlers
import unittest
from io import StringIO

from wsgidav.compat import StringIO
from wsgidav.util import (
BASE_LOGGER_NAME,
get_module_logger,
@@ -11,15 +11,14 @@
See http://webtest.readthedocs.org/en/latest/
(successor of http://pythonpaste.org/testing-applications.html)
"""
from __future__ import print_function

import os
import shutil
import sys
import unittest
from tempfile import gettempdir
from urllib.parse import quote

from wsgidav import compat, util
from wsgidav import util
from wsgidav.fs_dav_provider import FilesystemProvider
from wsgidav.wsgidav_app import WsgiDAVApp

@@ -81,7 +80,7 @@ class ServerTest(unittest.TestCase):
self.app = webtest.TestApp(wsgi_app)

def tearDown(self):
shutil.rmtree(compat.to_unicode(self.rootpath))
shutil.rmtree(util.to_str(self.rootpath))
del self.app

def testPreconditions(self):

@@ -110,10 +109,10 @@ class ServerTest(unittest.TestCase):
# Big file with 10 MB
lines = []
line = "." * (1000 - 6 - len("\n"))
for i in compat.xrange(10 * 1000):
for i in range(10 * 1000):
lines.append("%04i: %s\n" % (i, line))
data3 = "".join(lines)
data3 = compat.to_bytes(data3)
data3 = util.to_bytes(data3)

# Remove old test files
app.delete("/file1.txt", expect_errors=True)

@@ -147,7 +146,7 @@ class ServerTest(unittest.TestCase):
app.request(
"/file1.txt",
method="GET",
headers={"Content-Length": compat.to_native(len(data1))},
headers={"Content-Length": util.to_str(len(data1))},
body=data1,
status=415,
)

@@ -164,10 +163,10 @@ class ServerTest(unittest.TestCase):
"""Handle special characters."""
app = self.app
uniData = (
u"This is a file with special characters:\n"
+ u"Umlaute(äöüß)\n"
+ u"Euro(\u20AC)\n"
+ u"Male(\u2642)"
"This is a file with special characters:\n"
+ "Umlaute(äöüß)\n"
+ "Euro(\u20AC)\n"
+ "Male(\u2642)"
)

data = uniData.encode("utf8")

@@ -199,14 +198,14 @@ class ServerTest(unittest.TestCase):

def unicode_to_url(s):
# TODO: Py3: Is this the correct way?
return compat.quote(s.encode("utf8"))
return quote(s.encode("utf8"))

# äöüß: (part of latin1)
__testrw(unicode_to_url(u"/file uml(\u00E4\u00F6\u00FC\u00DF).txt"))
__testrw(unicode_to_url("/file uml(\u00E4\u00F6\u00FC\u00DF).txt"))
# Euro sign (not latin1, but Cp1252)
__testrw(unicode_to_url(u"/file euro(\u20AC).txt"))
__testrw(unicode_to_url("/file euro(\u20AC).txt"))
# Male sign (only utf8)
__testrw(unicode_to_url(u"/file male(\u2642).txt"))
__testrw(unicode_to_url("/file male(\u2642).txt"))

def testAuthentication(self):
"""Require login."""
@@ -9,15 +9,13 @@ Example:
with WsgiDavTestServer(opts):
... test methods
"""
from __future__ import print_function

import multiprocessing
import os
import sys
import time
from tempfile import gettempdir

from wsgidav import compat, util
from wsgidav import util
from wsgidav.fs_dav_provider import FilesystemProvider
from wsgidav.wsgidav_app import WsgiDAVApp

@@ -61,7 +59,7 @@ class Timing(object):
def write_test_file(name, size):
path = os.path.join(gettempdir(), name)
with open(path, "wb") as f:
f.write(compat.to_bytes("*") * size)
f.write(util.to_bytes("*") * size)
return path
tox.ini (45 changed lines)

@@ -2,15 +2,15 @@
basepython = python3.9
envlist =
check
lint-py27
lint-py3
py27 # EOL 2020-01-02
py39
py38
py37
py36
py35 # EOL 2020-09-13
# py34 # EOL 2019-03-18
lint
# mypy
py310 # EOL 2026-10-04
py39 # EOL 2025-10-05
py38 # EOL 2024-10-14
py37 # EOL 2023-06-27
py36 # EOL 2021-12-21
# py35 # EOL 2020-09-13
# py34 # EOL 2019-03-18
coverage

skip_missing_interpreters = true

@@ -58,23 +58,15 @@ commands =
coverage report --fail-under=30.0


[testenv:lint-py27]
basepython = python2.7
[testenv:mypy]
skip_install = true
# TODO: cannot re-use [lint], because i sort has problems to iterate the
# testsfolder (UnicodeDecodeError on Lotos..stengel (???).docx)
deps =
flake8
# helper to generate HTML reports:
flake8-html
flake8-coding
# flake8-pytest
mypy
changedir = {toxinidir}
commands =
flake8 wsgidav setup.py --doctests
mypy wsgidav tests


[testenv:lint-py3]
[testenv:lint]
skip_install = true
deps =
# Required by flake8-bandit

@@ -94,16 +86,7 @@ deps =
flake8-bugbear # (B...) Check for error-prone constructs
# flake8-builtins # Check for python builtins being used as variables or parameters (A...)
flake8-coding # C101 Coding magic comment not found, ...
# flake8-commas # C812 missing trailing comma -> Handled by Black
# flake8-comprehensions
# flake8-docstrings
# flake8-eradicate # E800 Found commented out code
# flake8-expression-complexity # ECE001 Expression is too complex (X > Y)
# flake8-if-expr # KEK100 don`t use "[on_true] if [expression] else [on_false]" syntax
# flake8-import-order # (we run `isort --check`)
# flake8-isort # (we run `isort --check`)
# flake8-logging-format # G001 Logging statement uses string.format()
# flake8-pep3101 # Use "".format instead of %
# flake8-commas # C812 missing trailing comma -> Handled by Black # flake8-comprehensions # flake8-docstrings # flake8-eradicate # E800 Found commented out code # flake8-expression-complexity # ECE001 Expression is too complex (X > Y) # flake8-if-expr # KEK100 don`t use "[on_true] if [expression] else [on_false]" syntax # flake8-import-order # (we run `isort --check`) # flake8-isort # (we run `isort --check`) # flake8-logging-format # G001 Logging statement uses string.format() # flake8-pep3101 # Use "".format instead of %
# flake8-print # T001 print found
# flake8-pytest-style # PT009 use a regular assert instead of unittest-style 'assertTrue'
flake8-quotes # See also [flake8] config
@@ -15,7 +15,7 @@ NOTE:
When pywin32 is installed, number must be a.b.c for MSI builds?
"3.0.0a4" seems not to work in this case!
"""
__version__ = "3.1.2-a1"
__version__ = "4.0.0-a1"

# make version accessible as 'wsgidav.__version__'
# from wsgidav._version import __version__ # noqa: F401
@@ -1,136 +0,0 @@
# -*- coding: utf-8 -*-
# (c) 2009-2021 Martin Wendt and contributors; see WsgiDAV https://github.com/mar10/wsgidav
# Licensed under the MIT license:
# http://www.opensource.org/licenses/mit-license.php
"""
Tool functions to support Python 2 and 3.

Inspired by six https://pythonhosted.org/six/

TODO: since it is now based on six, we should remove this module eventually.
"""
# flake8: noqa

import sys

import six
from six import PY2, PY3, BytesIO
from six.moves import cStringIO as StringIO
from six.moves import input as console_input
from six.moves import queue, xrange
from six.moves.urllib.parse import quote, unquote, urlparse

# See #174: `collections_abc` would be part of six.moves, but only for
# six v1.13+ but we don't want to force users to update their system python's six
try:
import collections.abc as collections_abc # Python 3.3+
except ImportError:
import collections as collections_abc


__docformat__ = "reStructuredText"

_filesystemencoding = sys.getfilesystemencoding()

# String Abstractions

if PY2:

from base64 import decodestring as base64_decodebytes
from base64 import encodestring as base64_encodebytes
from cgi import escape as html_escape

def is_basestring(s):
"""Return True for any string type (for str/unicode on Py2 and bytes/str on Py3)."""
return isinstance(s, basestring)

def is_bytes(s):
"""Return True for bytestrings (for str on Py2 and bytes on Py3)."""
return isinstance(s, str)

def is_native(s):
"""Return True for native strings (for str on Py2 and Py3)."""
return isinstance(s, str)

def is_unicode(s):
"""Return True for unicode strings (for unicode on Py2 and str on Py3)."""
return isinstance(s, unicode)

def to_bytes(s, encoding="utf8"):
"""Convert unicode (text strings) to binary data (str on Py2 and bytes on Py3)."""
if type(s) is unicode:
s = s.encode(encoding)
elif type(s) is not str:
s = str(s)
return s

to_native = to_bytes
"""Convert data to native str type (bytestring on Py2 and unicode on Py3)."""

def to_unicode(s, encoding="utf8"):
"""Convert data to unicode text (unicode on Py2 and str on Py3)."""
if type(s) is not unicode:
s = unicode(s, encoding)
return s


else: # Python 3

from base64 import decodebytes as base64_decodebytes
from base64 import encodebytes as base64_encodebytes
from html import escape as html_escape

def is_basestring(s):
"""Return True for any string type (for str/unicode on Py2 and bytes/str on Py3)."""
return isinstance(s, (str, bytes))

def is_bytes(s):
"""Return True for bytestrings (for str on Py2 and bytes on Py3)."""
return isinstance(s, bytes)

def is_native(s):
"""Return True for native strings (for str on Py2 and Py3)."""
return isinstance(s, str)

def is_unicode(s):
"""Return True for unicode strings (for unicode on Py2 and str on Py3)."""
return isinstance(s, str)

def to_bytes(s, encoding="utf8"):
"""Convert a text string (unicode) to bytestring (str on Py2 and bytes on Py3)."""
if type(s) is not bytes:
s = bytes(s, encoding)
return s

def to_native(s, encoding="utf8"):
"""Convert data to native str type (bytestring on Py2 and unicode on Py3)."""
if type(s) is bytes:
s = str(s, encoding)
elif type(s) is not str:
s = str(s)
return s

to_unicode = to_native
"""Convert binary data to unicode (text strings) on Python 2 and 3."""


# Binary Strings

b_empty = to_bytes("")
b_slash = to_bytes("/")


# WSGI support


def unicode_to_wsgi(u):
"""Convert an environment variable to a WSGI 'bytes-as-unicode' string."""
# Taken from PEP3333; the server should already have performed this, when
# passing environ to the WSGI application
return u.encode(_filesystemencoding, "surrogateescape").decode("iso-8859-1")


def wsgi_to_bytes(s):
"""Convert a native string to a WSGI / HTTP compatible byte string."""
# Taken from PEP3333
return s.encode("iso-8859-1")
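The hunk above deletes the whole wsgidav/compat.py shim; the rest of this diff swaps every compat.* call site for wsgidav.util helpers (util.to_bytes, util.to_str, util.is_str, util.is_bytes, util.is_basestring). A minimal sketch of what those Py3-only replacements boil down to, assuming they mirror the to_bytes / to_str shown in the tests/davclient.py hunk earlier in this diff:

```python
# Hedged sketch of the util helpers that replace wsgidav.compat
# (names taken from call sites in this diff; bodies mirror tests/davclient.py).
def to_bytes(s, encoding="utf8"):
    """Convert a text string to bytes (no-op for bytes input)."""
    if type(s) is not bytes:
        s = bytes(s, encoding)
    return s

def to_str(s, encoding="utf8"):
    """Convert bytes (or any other object) to a native str."""
    if type(s) is bytes:
        s = str(s, encoding)
    elif type(s) is not str:
        s = str(s)
    return s

is_str = lambda s: isinstance(s, str)      # noqa: E731
is_bytes = lambda s: isinstance(s, bytes)  # noqa: E731

assert to_bytes("abc") == b"abc" and to_str(b"abc") == "abc"
```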
@@ -7,13 +7,11 @@
Implements a DAVError class that is used to signal WebDAV and HTTP errors.
"""
import datetime
from html import escape as html_escape

from wsgidav import __version__, compat, xml_tools
from wsgidav import __version__, util, xml_tools
from wsgidav.xml_tools import etree

# import traceback

__docformat__ = "reStructuredText"

# ========================================================================

@@ -40,7 +38,9 @@ HTTP_SEE_OTHER = 303
HTTP_NOT_MODIFIED = 304
HTTP_USE_PROXY = 305
HTTP_TEMP_REDIRECT = 307

HTTP_BAD_REQUEST = 400
HTTP_UNAUTHORIZED = 401
HTTP_PAYMENT_REQUIRED = 402
HTTP_FORBIDDEN = 403
HTTP_NOT_FOUND = 404

@@ -85,6 +85,7 @@ ERROR_DESCRIPTIONS = {
HTTP_NO_CONTENT: "204 No Content",
HTTP_NOT_MODIFIED: "304 Not Modified",
HTTP_BAD_REQUEST: "400 Bad Request",
HTTP_UNAUTHORIZED: "401 Unauthorized",
HTTP_FORBIDDEN: "403 Forbidden",
HTTP_METHOD_NOT_ALLOWED: "405 Method Not Allowed",
HTTP_NOT_FOUND: "404 Not Found",

@@ -108,6 +109,7 @@ ERROR_DESCRIPTIONS = {
ERROR_RESPONSES = {
HTTP_BAD_REQUEST: "An invalid request was specified",
HTTP_NOT_FOUND: "The specified resource was not found",
HTTP_UNAUTHORIZED: "Invalid authentication credentials for the requested resource",
HTTP_FORBIDDEN: "Access denied to the specified resource",
HTTP_INTERNAL_ERROR: "An internal server error occurred",
HTTP_NOT_IMPLEMENTED: "Not implemented",

@@ -161,7 +163,7 @@ class DAVErrorCondition(object):
return error_el

def as_string(self):
return compat.to_native(xml_tools.xml_to_bytes(self.as_xml(), True))
return util.to_str(xml_tools.xml_to_bytes(self.as_xml(), True))


# ========================================================================

@@ -189,7 +191,7 @@ class DAVError(Exception):
self.context_info = context_info
self.src_exception = src_exception
self.err_condition = err_condition
if compat.is_native(err_condition):
if util.is_str(err_condition):
self.err_condition = DAVErrorCondition(err_condition)
assert (
self.err_condition is None or type(self.err_condition) is DAVErrorCondition

@@ -224,7 +226,7 @@ class DAVError(Exception):
"""Return a tuple (content-type, response page)."""
# If it has pre- or post-condition: return as XML response
if self.err_condition:
return ("application/xml", compat.to_bytes(self.err_condition.as_string()))
return ("application/xml", util.to_bytes(self.err_condition.as_string()))

# Else return as HTML
status = get_http_status_string(self)

@@ -240,16 +242,16 @@ class DAVError(Exception):
html.append(" <title>{}</title>".format(status))
html.append("</head><body>")
html.append(" <h1>{}</h1>".format(status))
html.append(" <p>{}</p>".format(compat.html_escape(self.get_user_info())))
html.append(" <p>{}</p>".format(html_escape(self.get_user_info())))
html.append("<hr/>")
html.append(
"<a href='https://github.com/mar10/wsgidav/'>WsgiDAV/{}</a> - {}".format(
__version__, compat.html_escape(str(datetime.datetime.now()), "utf-8")
__version__, html_escape(str(datetime.datetime.now()), "utf-8")
)
)
html.append("</body></html>")
html = "\n".join(html)
return ("text/html", compat.to_bytes(html))
return ("text/html", util.to_bytes(html))


def get_http_status_code(v):
@@ -80,8 +80,11 @@ import os
import sys
import time
import traceback
from datetime import datetime
from typing import Optional
from urllib.parse import quote, unquote

from wsgidav import compat, util, xml_tools
from wsgidav import util, xml_tools
from wsgidav.dav_error import (
HTTP_FORBIDDEN,
HTTP_NOT_FOUND,

@@ -162,8 +165,8 @@ class _DAVResource(object):
See also DAVProvider.get_resource_inst().
"""

def __init__(self, path, is_collection, environ):
assert compat.is_native(path)
def __init__(self, path: str, is_collection: bool, environ: dict):
assert util.is_str(path)
assert path == "" or path.startswith("/")
self.provider = environ["wsgidav.provider"]
self.path = path

@@ -183,7 +186,7 @@ class _DAVResource(object):
# """
# raise NotImplementedError

def get_content_length(self):
def get_content_length(self) -> Optional[int]:
"""Contains the Content-Length header returned by a GET without accept
headers.

@@ -196,7 +199,7 @@ class _DAVResource(object):
return None
raise NotImplementedError

def get_content_type(self):
def get_content_type(self) -> Optional[str]:
"""Contains the Content-Type header returned by a GET without accept
headers.

@@ -210,7 +213,7 @@ class _DAVResource(object):
return None
raise NotImplementedError

def get_creation_date(self):
def get_creation_date(self) -> Optional[datetime]:
"""Records the time and date the resource was created.

The creationdate property should be defined on all DAV compliant

@@ -233,7 +236,7 @@ class _DAVResource(object):
assert self.is_collection
return None

def get_display_name(self):
def get_display_name(self) -> str:
"""Provides a name for the resource that is suitable for presentation to
a user.

@@ -360,7 +363,7 @@ class _DAVResource(object):

See also comments in DEVELOPERS.txt glossary.
"""
return compat.quote(self.provider.share_path + self.get_preferred_path())
return quote(self.provider.share_path + self.get_preferred_path())

# def getRefKey(self):
# """Return an unambigous identifier string for a resource.

@@ -388,7 +391,7 @@ class _DAVResource(object):
# Nautilus chokes, if href encodes '(' as '%28'
# So we don't encode 'extra' and 'safe' characters (see rfc2068 3.2.1)
safe = "/" + "!*'()," + "$-_|."
return compat.quote(
return quote(
self.provider.mount_path
+ self.provider.share_path
+ self.get_preferred_path(),
@@ -405,6 +408,15 @@ class _DAVResource(object):
# return None
# return self.provider.get_resource_inst(parentpath)

def get_member(self, name):
"""Return child resource with a given name (None, if not found).

This method COULD be overridden by a derived class, for performance
reasons.
This default implementation calls self.provider.get_resource_inst().
"""
raise NotImplementedError # implemented by DAVCollecion

def get_member_list(self):
"""Return a list of direct members (_DAVResource or derived objects).

@@ -1171,7 +1183,7 @@ class DAVNonCollection(_DAVResource):
See also _DAVResource
"""

def __init__(self, path, environ):
def __init__(self, path: str, environ: dict):
_DAVResource.__init__(self, path, False, environ)

def get_content_length(self):

@@ -1471,9 +1483,7 @@ class DAVProvider(object):

Used to calculate the <path> from a storage key by inverting get_ref_url().
"""
return "/" + compat.unquote(util.lstripstr(ref_url, self.share_path)).lstrip(
"/"
)
return "/" + unquote(util.lstripstr(ref_url, self.share_path)).lstrip("/")

def get_resource_inst(self, path, environ):
"""Return a _DAVResource object for path.
@@ -37,23 +37,18 @@ namespace in order to define access permissions for the following middleware
TODO: Work In Progress / Subject to change

"""
from __future__ import print_function

import abc
import sys
from abc import ABC, abstractmethod
from hashlib import md5

import six

from wsgidav import compat, util
from wsgidav import util

__docformat__ = "reStructuredText"

logger = util.get_module_logger(__name__)


@six.add_metaclass(abc.ABCMeta)
class BaseDomainController(object):
class BaseDomainController(ABC):
#: A domain controller MAY list these values as
#: `environ["wsgidav.auth.permissions"] = (<permission>, ...)`
known_permissions = ("browse_dir", "delete_resource", "edit_resource")

@@ -91,7 +86,7 @@ class BaseDomainController(object):
realm = "/"
return realm

@abc.abstractmethod
@abstractmethod
def get_domain_realm(self, path_info, environ):
"""Return the normalized realm name for a given URL.

@@ -112,7 +107,7 @@ class BaseDomainController(object):
"""
raise NotImplementedError

@abc.abstractmethod
@abstractmethod
def require_authentication(self, realm, environ):
"""Return False to disable authentication for this request.

@@ -148,7 +143,7 @@ class BaseDomainController(object):
realm = self.get_domain_realm(path_info, None)
return not self.require_authentication(realm, None)

@abc.abstractmethod
@abstractmethod
def basic_auth_user(self, realm, user_name, password, environ):
"""Check request access permissions for realm/user_name/password.

@@ -170,7 +165,7 @@ class BaseDomainController(object):
"""
raise NotImplementedError

@abc.abstractmethod
@abstractmethod
def supports_http_digest_auth(self):
"""Signal if this DC instance supports the HTTP digest authentication theme.

@@ -203,7 +198,7 @@ class BaseDomainController(object):
def _compute_http_digest_a1(self, realm, user_name, password):
"""Internal helper for derived classes to compute a digest hash (A1 part)."""
data = user_name + ":" + realm + ":" + password
A1 = md5(compat.to_bytes(data)).hexdigest()
A1 = md5(util.to_bytes(data)).hexdigest()
return A1

def digest_auth_user(self, realm, user_name, environ):
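The `_compute_http_digest_a1` helper above is simply the MD5 of `user:realm:password`, the "A1" value used by HTTP digest authentication (RFC 2617). A tiny standalone illustration, assuming the same inputs as the method in this diff (the function name `compute_a1` is illustrative only):

```python
from hashlib import md5

def compute_a1(user_name, realm, password):
    # Same shape as BaseDomainController._compute_http_digest_a1 in this diff.
    data = user_name + ":" + realm + ":" + password
    return md5(data.encode("utf8")).hexdigest()

print(compute_a1("tester", "/", "secret"))  # prints a 32-character hex digest
```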
@@ -75,13 +75,11 @@ Testability and caveats
This class is being tested for a network domain (I'm setting one up to test).

"""
from __future__ import print_function

import win32net
import win32netcon
import win32security

from wsgidav import compat, util
from wsgidav import util
from wsgidav.dc.base_dc import BaseDomainController

__docformat__ = "reStructuredText"

@@ -175,11 +173,11 @@ class NTDomainController(BaseDomainController):
server, 0, win32netcon.FILTER_NORMAL_ACCOUNT, 0
)
# Make sure, we compare unicode
un = compat.to_unicode(user_name).lower()
un = util.to_str(user_name).lower()
for userinfo in users:
uiname = userinfo.get("name")
assert uiname
assert compat.is_unicode(uiname)
assert util.is_str(uiname)
if un == userinfo["name"].lower():
return True
except win32net.error as e:
@@ -9,8 +9,6 @@ Used by HTTPAuthenticator. Only available on linux and macOS.

See https://wsgidav.readthedocs.io/en/latest/user_guide_configure.html
"""
from __future__ import print_function

import pam

from wsgidav import util
@@ -54,7 +54,7 @@ These configuration settings are evaluated:
import sys
import threading

from wsgidav import compat, util
from wsgidav import util
from wsgidav.middleware import BaseMiddleware
from wsgidav.util import safe_re_encode

@@ -182,11 +182,11 @@ class WsgiDavDebugFilter(BaseMiddleware):

# Check, if response is a binary string, otherwise we probably have
# calculated a wrong content-length
assert compat.is_bytes(v), v
assert util.is_bytes(v), v

# Dump response body
drb = environ.get("wsgidav.dump_response_body")
if compat.is_basestring(drb):
if util.is_basestring(drb):
# Middleware provided a formatted body representation
_logger.info(drb)
drb = environ["wsgidav.dump_response_body"] = None
@@ -85,13 +85,12 @@ DEFAULT_CONFIG = {
"icon": True,
"response_trailer": True, # Raw HTML code, appended as footer (True: use a default)
"show_user": True, # Show authenticated user an realm
# Send <dm:mount> response if request URL contains '?davmount'
"davmount": False,
# Add an 'open as webfolder' link (requires Windows clients):
"ms_mount": False,
"ms_sharepoint_support": True, # Invoke MS Offce documents for editing using WebDAV
# "ms_sharepoint_plugin": False, # Invoke MS Offce documents for editing using WebDAV
# "ms_sharepoint_urls": False, # Prepend 'ms-word:ofe|u|' to URL for MS Offce documents
# Send <dm:mount> response if request URL contains '?davmount' (rfc4709)
"davmount": True,
# Add 'Mount' link at the top
"davmount_links": False,
"ms_sharepoint_support": True, # Invoke MS Office documents for editing using WebDAV
"libre_office_support": True, # Invoke Libre Office documents for editing using WebDAV
# The path to the directory that contains template.html and associated assets.
# The default is the htdocs directory within the dir_browser directory.
"htdocs_path": None,
@@ -8,10 +8,11 @@ WSGI middleware that handles GET requests on collections to display directories.
import os
import sys
from fnmatch import fnmatch
from urllib.parse import unquote

from jinja2 import Environment, FileSystemLoader

from wsgidav import __version__, compat, util
from wsgidav import __version__, util
from wsgidav.dav_error import HTTP_MEDIATYPE_NOT_SUPPORTED, HTTP_OK, DAVError
from wsgidav.middleware import BaseMiddleware
from wsgidav.util import safe_re_encode

@@ -28,16 +29,17 @@ DAVMOUNT_TEMPLATE = """
</dm:mount>
""".strip()

msOfficeTypeToExtMap = {
MS_OFFICE_TYPE_TO_EXT_MAP = {
"excel": ("xls", "xlt", "xlm", "xlsm", "xlsx", "xltm", "xltx"),
"powerpoint": ("pps", "ppt", "pptm", "pptx", "potm", "potx", "ppsm", "ppsx"),
"word": ("doc", "dot", "docm", "docx", "dotm", "dotx"),
"visio": ("vsd", "vsdm", "vsdx", "vstm", "vstx"),
}
msOfficeExtToTypeMap = {}
for t, el in msOfficeTypeToExtMap.items():
MS_OFFICE_EXT_TO_TYPE_MAP = {}
for t, el in MS_OFFICE_TYPE_TO_EXT_MAP.items():
for e in el:
msOfficeExtToTypeMap[e] = t
MS_OFFICE_EXT_TO_TYPE_MAP[e] = t
OPEN_OFFICE_EXTENSIONS = {"odt", "odp", "odx"}


class WsgiDavDirBrowser(BaseMiddleware):

@@ -103,7 +105,7 @@ class WsgiDavDirBrowser(BaseMiddleware):
):
collectionUrl = util.make_complete_url(environ)
collectionUrl = collectionUrl.split("?", 1)[0]
res = compat.to_bytes(DAVMOUNT_TEMPLATE.format(collectionUrl))
res = util.to_bytes(DAVMOUNT_TEMPLATE.format(collectionUrl))
# TODO: support <dm:open>%s</dm:open>

start_response(

@@ -120,7 +122,7 @@ class WsgiDavDirBrowser(BaseMiddleware):
context = self._get_context(environ, dav_res)

res = self.template.render(**context)
res = compat.to_bytes(res)
res = util.to_bytes(res)
start_response(
"200 OK",
[

@@ -152,12 +154,18 @@ class WsgiDavDirBrowser(BaseMiddleware):
assert dav_res.is_collection

is_readonly = environ["wsgidav.provider"].is_readonly()
ms_sharepoint_support = self.dir_config.get("ms_sharepoint_support")
libre_office_support = self.dir_config.get("libre_office_support")

# TODO: WebDAV URLs only on Windows?
# TODO: WebDAV URLs only on HTTPS?
is_windows = "Windows NT " in environ.get("HTTP_USER_AGENT", "")

context = {
"htdocs": (self.config.get("mount_path") or "") + ASSET_SHARE,
"rows": [],
"version": __version__,
"display_path": compat.unquote(dav_res.get_href()),
"display_path": unquote(dav_res.get_href()),
"url": dav_res.get_href(), # util.make_complete_url(environ),
"parent_url": util.get_uri_parent(dav_res.get_href()),
"config": self.dir_config,
@@ -198,23 +206,33 @@ class WsgiDavDirBrowser(BaseMiddleware):
a_classes = []
if res.is_collection:
tr_classes.append("directory")
add_link_html = []

if not is_readonly and not res.is_collection:
ext = os.path.splitext(href)[1].lstrip(".").lower()
officeType = msOfficeExtToTypeMap.get(ext)
if officeType:
if self.dir_config.get("ms_sharepoint_support"):
ofe_prefix = "ms-{}:ofe|u|".format(officeType)
ms_office_type = MS_OFFICE_EXT_TO_TYPE_MAP.get(ext)
if ms_office_type:
if ms_sharepoint_support:
ofe_prefix = "ms-{}:ofe|u|".format(ms_office_type)
a_classes.append("msoffice")
if libre_office_support:
add_link_html.append(f"<a class='edit2' title='Edit with Libre Office' href='vnd.libreoffice.command:ofv|u|{href}'>Edit</a>")
# ofe_prefix_2 = "vnd.libreoffice.command:ofv|u|"
# a_classes.append("msoffice")
elif libre_office_support:
ofe_prefix = "vnd.libreoffice.command:ofv|u|"
# a_classes.append("msoffice")

elif ext in OPEN_OFFICE_EXTENSIONS:
if libre_office_support:
ofe_prefix = "vnd.libreoffice.command:ofv|u|"
a_classes.append("msoffice")
# elif self.dir_config.get("ms_sharepoint_plugin"):
# a_classes.append("msoffice")
# elif self.dir_config.get("ms_sharepoint_urls"):
# href = "ms-{}:ofe|u|{}".format(officeType, href)

entry = {
"href": href,
"ofe_prefix": ofe_prefix,
"a_class": " ".join(a_classes),
"add_link_html": "".join(add_link_html),
"tr_class": " ".join(tr_classes),
"display_name": res.get_display_name(),
"last_modified": res.get_last_modified(),
@@ -227,7 +245,7 @@ class WsgiDavDirBrowser(BaseMiddleware):
dirInfoList.append(entry)
#
ignore_patterns = self.dir_config.get("ignore", [])
if compat.is_basestring(ignore_patterns):
if util.is_basestring(ignore_patterns):
ignore_patterns = ignore_patterns.split(",")

ignored_list = []

@@ -275,7 +293,7 @@ class WsgiDavDirBrowser(BaseMiddleware):
if "wsgidav.auth.user_name" in environ:
context.update(
{
"is_authenticated": True,
"is_authenticated": bool(environ.get("wsgidav.auth.user_name")),
"user_name": (environ.get("wsgidav.auth.user_name") or "anonymous"),
"realm": environ.get("wsgidav.auth.realm"),
"user_roles": ", ".join(environ.get("wsgidav.auth.roles") or []),
@ -19,16 +19,9 @@
|
|||
Index of {{ display_path }}
|
||||
</h1>
|
||||
|
||||
{% if config.davmount or config.ms_mount %}
|
||||
{% if config.davmount_links %}
|
||||
<p class="links">
|
||||
{%- if config.davmount %}
|
||||
<a title="Open this folder in a registered WebDAV client." href="{{ url }}?davmount">Mount</a>
|
||||
{% endif -%}
|
||||
{%- if config.ms_mount %}
|
||||
{% if config.davmount %} – {% endif %}
|
||||
<a title="Open as Web Folder (requires Microsoft Internet Explorer 7 or older!)"
|
||||
href="" folder="{{ url }}">Open as Web Folder</a>
|
||||
{% endif %}
|
||||
<a title="Open this folder in a registered WebDAV client." href="{{ url }}?davmount">Mount</a>
|
||||
</p>
|
||||
{% endif %}
|
||||
|
||||
|
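The simplified template always links to `{{ url }}?davmount`. What that URL returns is not shown in this diff; a WebDAV mount document per RFC 4709 is a small XML payload, roughly as in this hedged sketch (not the middleware's actual code):

```python
# Hypothetical handler result for a "?davmount" request (RFC 4709 mount document).
def make_davmount_body(collection_url):
    return (
        '<dm:mount xmlns:dm="http://purl.org/NET/webdav/mount">'
        f"<dm:url>{collection_url}</dm:url>"
        "</dm:mount>"
    ).encode("utf-8")

# A server would send this with Content-Type "application/davmount+xml",
# which a registered WebDAV client opens as a mounted folder.
```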
@@ -82,6 +75,14 @@
<a class="{{ row.a_class }}" href="{{row.href}}" {% if row.ofe_prefix %} data-ofe="{{row.ofe_prefix}}" {% endif %} >
{{row.display_name}}
</a>
{%- if row.href_2 %}
<a class="{{ row.a_class_2 }}" href="{{row.href_2}}" {% if row.ofe_prefix_2%} data-ofe="{{row.ofe_prefix_2}}" {% endif %} >
{{row.display_name_2}}
</a>
{% endif -%}
{%- if row.add_link_html %}
{{row.add_link_html}}
{% endif -%}
</td>
<td>{{ row.display_type }}</td>
<td>{{ row.str_size }}</td>

@ -20,7 +20,7 @@ import shutil
|
|||
import stat
|
||||
import sys
|
||||
|
||||
from wsgidav import compat, util
|
||||
from wsgidav import util
|
||||
from wsgidav.dav_error import HTTP_FORBIDDEN, DAVError
|
||||
from wsgidav.dav_provider import DAVCollection, DAVNonCollection, DAVProvider
|
||||
|
||||
|
@ -46,7 +46,7 @@ class FileResource(DAVNonCollection):
|
|||
self.file_stat = os.stat(self._file_path)
|
||||
# Setting the name from the file path should fix the case on Windows
|
||||
self.name = os.path.basename(self._file_path)
|
||||
self.name = compat.to_native(self.name)
|
||||
self.name = util.to_str(self.name)
|
||||
|
||||
# Getter methods for standard live properties
|
||||
def get_content_length(self):
|
||||
|
@ -181,7 +181,7 @@ class FolderResource(DAVCollection):
|
|||
self.file_stat = os.stat(self._file_path)
|
||||
# Setting the name from the file path should fix the case on Windows
|
||||
self.name = os.path.basename(self._file_path)
|
||||
self.name = compat.to_native(self.name) # .encode("utf8")
|
||||
self.name = util.to_str(self.name) # .encode("utf8")
|
||||
|
||||
# Getter methods for standard live properties
|
||||
def get_creation_date(self):
|
||||
|
@ -213,20 +213,20 @@ class FolderResource(DAVCollection):
|
|||
|
||||
nameList = []
|
||||
# self._file_path is unicode, so os.listdir returns unicode as well
|
||||
assert compat.is_unicode(self._file_path)
|
||||
assert util.is_str(self._file_path)
|
||||
# if "temp" in self._file_path:
|
||||
# raise RuntimeError("Oops")
|
||||
for name in os.listdir(self._file_path):
|
||||
if not compat.is_unicode(name):
|
||||
if not util.is_str(name):
|
||||
name = name.decode(sys.getfilesystemencoding())
|
||||
assert compat.is_unicode(name)
|
||||
assert util.is_str(name)
|
||||
# Skip non files (links and mount points)
|
||||
fp = os.path.join(self._file_path, name)
|
||||
if not os.path.isdir(fp) and not os.path.isfile(fp):
|
||||
_logger.debug("Skipping non-file {!r}".format(fp))
|
||||
continue
|
||||
# name = name.encode("utf8")
|
||||
name = compat.to_native(name)
|
||||
name = util.to_str(name)
|
||||
nameList.append(name)
|
||||
return nameList
|
||||
|
||||
|
@ -235,8 +235,8 @@ class FolderResource(DAVCollection):
|
|||
|
||||
See DAVCollection.get_member()
|
||||
"""
|
||||
assert compat.is_native(name), "{!r}".format(name)
|
||||
fp = os.path.join(self._file_path, compat.to_unicode(name))
|
||||
assert util.is_str(name), "{!r}".format(name)
|
||||
fp = os.path.join(self._file_path, util.to_str(name))
|
||||
# name = name.encode("utf8")
|
||||
path = util.join_uri(self.path, name)
|
||||
if os.path.isdir(fp):
|
||||
|
@ -383,8 +383,8 @@ class FilesystemProvider(DAVProvider):
|
|||
"""
|
||||
root_path = self.root_folder_path
|
||||
assert root_path is not None
|
||||
assert compat.is_native(root_path)
|
||||
assert compat.is_native(path)
|
||||
assert util.is_str(root_path)
|
||||
assert util.is_str(path)
|
||||
|
||||
path_parts = path.strip("/").split("/")
|
||||
file_path = os.path.abspath(os.path.join(root_path, *path_parts))
|
||||
|
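Worked example of the path mapping in the hunk above (paths are made up):

```python
import os

# root_path = "/srv/share", path = "/docs/readme.txt"
path_parts = "/docs/readme.txt".strip("/").split("/")            # ["docs", "readme.txt"]
file_path = os.path.abspath(os.path.join("/srv/share", *path_parts))
# -> "/srv/share/docs/readme.txt"
```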
|
|
@@ -77,6 +77,7 @@ The environ variable here is the WSGI 'environ' dictionary. It is passed to
all methods of the domain controller as a means for developers to pass information
from previous middleware or server config (if required).
"""
import base64
import inspect
import random
import re
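As the docstring says, every domain controller hook receives the WSGI `environ`. A hedged sketch of a custom controller, limited to the two methods that are called later in this diff (a real controller implements the full interface):

```python
class EnvironAwareDomainController:
    """Illustrative only; method names follow the calls visible in this diff."""

    def get_domain_realm(self, path_info, environ):
        # Example: one realm per top-level share, e.g. "/share1/doc.txt" -> "share1".
        parts = path_info.strip("/").split("/")
        return parts[0] or "default"

    def basic_auth_user(self, realm, user_name, password, environ):
        # Example: also consult data that earlier middleware stored in environ.
        forwarded_host = environ.get("HTTP_X_FORWARDED_HOST", "")
        return bool(user_name) and password == "secret" and forwarded_host != ""
```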
|
@ -84,7 +85,8 @@ import time
|
|||
from hashlib import md5
|
||||
from textwrap import dedent
|
||||
|
||||
from wsgidav import compat, util
|
||||
from wsgidav import util
|
||||
from wsgidav.dav_error import HTTP_NOT_FOUND, DAVError
|
||||
from wsgidav.dc.simple_dc import SimpleDomainController
|
||||
from wsgidav.middleware import BaseMiddleware
|
||||
from wsgidav.util import calc_base64, calc_hexdigest, dynamic_import_class
|
||||
|
@ -100,7 +102,7 @@ def make_domain_controller(wsgidav_app, config):
|
|||
if dc is True or not dc:
|
||||
# True or null:
|
||||
dc = SimpleDomainController
|
||||
elif compat.is_basestring(dc):
|
||||
elif util.is_basestring(dc):
|
||||
# If a plain string is passed, try to import it as class
|
||||
dc = dynamic_import_class(dc)
|
||||
|
||||
|
@ -275,7 +277,7 @@ class HTTPAuthenticator(BaseMiddleware):
|
|||
_logger.debug("401 Not Authorized for realm '{}' (basic)".format(realm))
|
||||
wwwauthheaders = 'Basic realm="{}"'.format(realm)
|
||||
|
||||
body = compat.to_bytes(self.error_message_401)
|
||||
body = util.to_bytes(self.error_message_401)
|
||||
start_response(
|
||||
"401 Not Authorized",
|
||||
[
|
||||
|
@ -296,8 +298,8 @@ class HTTPAuthenticator(BaseMiddleware):
|
|||
except Exception:
|
||||
auth_value = ""
|
||||
|
||||
auth_value = compat.base64_decodebytes(compat.to_bytes(auth_value))
|
||||
auth_value = compat.to_native(auth_value)
|
||||
auth_value = base64.decodebytes(util.to_bytes(auth_value))
|
||||
auth_value = util.to_str(auth_value)
|
||||
user_name, password = auth_value.split(":", 1)
|
||||
|
||||
if self.domain_controller.basic_auth_user(realm, user_name, password, environ):
|
||||
|
@ -334,7 +336,7 @@ class HTTPAuthenticator(BaseMiddleware):
|
|||
)
|
||||
)
|
||||
|
||||
body = compat.to_bytes(self.error_message_401)
|
||||
body = util.to_bytes(self.error_message_401)
|
||||
start_response(
|
||||
"401 Not Authorized",
|
||||
[
|
||||
|
@ -350,6 +352,12 @@ class HTTPAuthenticator(BaseMiddleware):
|
|||
|
||||
realm = self.domain_controller.get_domain_realm(environ["PATH_INFO"], environ)
|
||||
|
||||
if not realm:
|
||||
raise DAVError(
|
||||
HTTP_NOT_FOUND,
|
||||
context_info=f"Could not resolve realm for {environ['PATH_INFO']}",
|
||||
)
|
||||
|
||||
is_invalid_req = False
|
||||
invalid_req_reasons = []
|
||||
|
||||
|
@@ -586,7 +594,7 @@ class HTTPAuthenticator(BaseMiddleware):
"""

def md5h(data):
return md5(compat.to_bytes(data)).hexdigest()
return md5(util.to_bytes(data)).hexdigest()

def md5kd(secret, data):
return md5h(secret + ":" + data)
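For context, this is how the two helpers combine in an RFC 2617 digest check with `qop=auth`; a sketch only, with argument names of my choosing rather than the project's:

```python
# md5h() and md5kd() as defined in the hunk above.
def digest_response(user, realm, password, method, uri, nonce, nc, cnonce, qop="auth"):
    ha1 = md5h(f"{user}:{realm}:{password}")
    ha2 = md5h(f"{method}:{uri}")
    return md5kd(ha1, f"{nonce}:{nc}:{cnonce}:{qop}:{ha2}")
```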
|
|
|
@ -40,7 +40,7 @@ import random
|
|||
import time
|
||||
from pprint import pformat
|
||||
|
||||
from wsgidav import compat, util
|
||||
from wsgidav import util
|
||||
from wsgidav.dav_error import (
|
||||
HTTP_LOCKED,
|
||||
DAVError,
|
||||
|
@ -60,13 +60,13 @@ _logger = util.get_module_logger(__name__)
|
|||
|
||||
|
||||
def generate_lock_token():
|
||||
return "opaquelocktoken:" + compat.to_native(hex(random.getrandbits(256)))
|
||||
return "opaquelocktoken:" + util.to_str(hex(random.getrandbits(256)))
|
||||
|
||||
|
||||
def normalize_lock_root(path):
|
||||
# Normalize root: /foo/bar
|
||||
assert path
|
||||
path = compat.to_native(path)
|
||||
path = util.to_str(path)
|
||||
path = "/" + path.strip("/")
|
||||
return path
|
||||
|
||||
|
@@ -100,18 +100,18 @@ def lock_string(lock_dict):


def validate_lock(lock):
assert compat.is_native(lock["root"])
assert util.is_str(lock["root"])
assert lock["root"].startswith("/")
assert lock["type"] == "write"
assert lock["scope"] in ("shared", "exclusive")
assert lock["depth"] in ("0", "infinity")
assert compat.is_bytes(lock["owner"]), lock # XML bytestring
assert util.is_bytes(lock["owner"]), lock # XML bytestring
# raises TypeError:
timeout = float(lock["timeout"])
assert timeout > 0 or timeout == -1, "timeout must be positive or -1"
assert compat.is_native(lock["principal"])
assert util.is_str(lock["principal"])
if "token" in lock:
assert compat.is_native(lock["token"])
assert util.is_str(lock["token"])


# ========================================================================
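An example lock dictionary that would pass `validate_lock()` above (all values invented):

```python
sample_lock = {
    "root": "/shared/report.docx",      # str, must start with "/"
    "type": "write",
    "scope": "exclusive",               # or "shared"
    "depth": "infinity",                # or "0"
    "owner": b"<D:href>jane</D:href>",  # XML bytestring
    "timeout": 3600,                    # float() > 0, or -1 for infinite
    "principal": "jane",
    "token": "opaquelocktoken:0xdeadbeef",
}
```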
|
@ -459,7 +459,7 @@ class LockManager(object):
|
|||
|
||||
@return: None or raise error
|
||||
"""
|
||||
assert compat.is_native(url)
|
||||
assert util.is_str(url)
|
||||
assert depth in ("0", "infinity")
|
||||
_logger.debug(
|
||||
"check_write_permission({}, {}, {}, {})".format(
|
||||
|
|
|
@ -15,7 +15,7 @@ import os
|
|||
import shelve
|
||||
import time
|
||||
|
||||
from wsgidav import compat, util
|
||||
from wsgidav import util
|
||||
from wsgidav.lock_manager import (
|
||||
generate_lock_token,
|
||||
lock_string,
|
||||
|
@ -301,7 +301,7 @@ class LockStorageDict(object):
|
|||
Returns:
|
||||
List of valid lock dictionaries (may be empty).
|
||||
"""
|
||||
assert compat.is_native(path)
|
||||
assert util.is_str(path)
|
||||
assert path and path.startswith("/")
|
||||
assert include_root or include_children
|
||||
|
||||
|
|
|
@ -8,7 +8,7 @@ import time
|
|||
|
||||
import redis
|
||||
|
||||
from wsgidav import compat, util
|
||||
from wsgidav import util
|
||||
from wsgidav.lock_manager import (
|
||||
generate_lock_token,
|
||||
lock_string,
|
||||
|
@ -210,7 +210,7 @@ class LockStorageRedis(object):
|
|||
Returns:
|
||||
List of valid lock dictionaries (may be empty).
|
||||
"""
|
||||
assert compat.is_native(path)
|
||||
assert util.is_str(path)
|
||||
assert path and path.startswith("/")
|
||||
assert include_root or include_children
|
||||
|
||||
|
|
|
@ -22,13 +22,12 @@ Valid options are (sample shows defaults)::
|
|||
}
|
||||
|
||||
"""
|
||||
from __future__ import print_function
|
||||
|
||||
from urllib.parse import quote
|
||||
from uuid import uuid4
|
||||
|
||||
import couchdb
|
||||
|
||||
from wsgidav import compat, util
|
||||
from wsgidav import util
|
||||
|
||||
__docformat__ = "reStructuredText"
|
||||
|
||||
|
@ -178,7 +177,7 @@ class CouchPropertyManager(object):
|
|||
doc = {
|
||||
"_id": uuid4().hex, # Documentation suggests to set the id
|
||||
"url": norm_url,
|
||||
"title": compat.quote(norm_url),
|
||||
"title": quote(norm_url),
|
||||
"type": "properties",
|
||||
"properties": {name: property_value},
|
||||
}
|
||||
|
@ -215,7 +214,7 @@ class CouchPropertyManager(object):
|
|||
doc2 = {
|
||||
"_id": uuid4().hex,
|
||||
"url": destUrl,
|
||||
"title": compat.quote(destUrl),
|
||||
"title": quote(destUrl),
|
||||
"type": "properties",
|
||||
"properties": doc["properties"],
|
||||
}
|
||||
|
|
|
@ -22,11 +22,11 @@ Valid options are (sample shows defaults)::
|
|||
}
|
||||
|
||||
"""
|
||||
from __future__ import print_function
|
||||
from urllib.parse import quote
|
||||
|
||||
import pymongo
|
||||
|
||||
from wsgidav import compat, util
|
||||
from wsgidav import util
|
||||
|
||||
__docformat__ = "reStructuredText"
|
||||
|
||||
|
@ -137,7 +137,7 @@ class MongoPropertyManager(object):
|
|||
|
||||
doc = self.collection.find_one({"_url": norm_url})
|
||||
if not doc:
|
||||
doc = {"_url": norm_url, "_title": compat.quote(norm_url)}
|
||||
doc = {"_url": norm_url, "_title": quote(norm_url)}
|
||||
doc[encode_mongo_key(name)] = property_value
|
||||
self.collection.save(doc)
|
||||
|
||||
|
|
|
@ -6,7 +6,9 @@
|
|||
"""
|
||||
WSGI application that handles one single WebDAV request.
|
||||
"""
|
||||
from wsgidav import compat, util, xml_tools
|
||||
from urllib.parse import unquote, urlparse
|
||||
|
||||
from wsgidav import util, xml_tools
|
||||
from wsgidav.dav_error import (
|
||||
HTTP_BAD_GATEWAY,
|
||||
HTTP_BAD_REQUEST,
|
||||
|
@@ -672,7 +674,7 @@ class RequestServer(object):
WORKAROUND_CHUNK_LENGTH = False
buf = environ["wsgi.input"].readline()
environ["wsgidav.some_input_read"] = 1
if buf == compat.b_empty:
if buf == b"":
length = 0
else:
length = int(buf, 16)

@@ -684,14 +686,14 @@ class RequestServer(object):
environ["wsgidav.some_input_read"] = 1
# Keep receiving until we read expected size or reach
# EOF
if buf == compat.b_empty:
if buf == b"":
length = 0
else:
length -= len(buf)
else:
environ["wsgi.input"].readline()
buf = environ["wsgi.input"].readline()
if buf == compat.b_empty:
if buf == b"":
length = 0
else:
length = int(buf, 16)

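The two hunks above read a chunked request body by hand, parsing each hex chunk-size line. A stripped-down version of the same loop, for orientation only (the real code also tracks `wsgidav.some_input_read` and a workaround flag):

```python
def read_chunked_body(stream):
    """Read one HTTP/1.1 chunked-encoded body from a file-like byte stream."""
    body = b""
    while True:
        size_line = stream.readline()          # e.g. b"1a\r\n"
        length = int(size_line, 16) if size_line.strip() else 0
        if length == 0:                        # "0\r\n\r\n" terminates the body
            break
        body += stream.read(length)
        stream.readline()                      # consume the CRLF after the chunk data
    return body

# e.g. read_chunked_body(io.BytesIO(b"4\r\nWiki\r\n0\r\n\r\n")) == b"Wiki"
```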
@ -922,7 +924,7 @@ class RequestServer(object):
|
|||
|
||||
# Destination header may be quoted (e.g. DAV Explorer sends unquoted,
|
||||
# Windows quoted)
|
||||
http_destination = compat.unquote(environ["HTTP_DESTINATION"])
|
||||
http_destination = unquote(environ["HTTP_DESTINATION"])
|
||||
|
||||
# Return fragments as part of <path>
|
||||
# Fixes litmus -> running `basic': 9. delete_fragment....... WARNING:
|
||||
|
@ -935,7 +937,7 @@ class RequestServer(object):
|
|||
_dest_params,
|
||||
_dest_query,
|
||||
_dest_frag,
|
||||
) = compat.urlparse(http_destination, allow_fragments=False)
|
||||
) = urlparse(http_destination, allow_fragments=False)
|
||||
|
||||
if src_res.is_collection:
|
||||
dest_path = dest_path.rstrip("/") + "/"
|
||||
|
@ -1278,7 +1280,7 @@ class RequestServer(object):
|
|||
|
||||
lock_type = None
|
||||
lock_scope = None
|
||||
lock_owner = compat.to_bytes("")
|
||||
lock_owner = util.to_bytes("")
|
||||
lock_depth = environ.setdefault("HTTP_DEPTH", "infinity")
|
||||
|
||||
for linode in lockinfo_el:
|
||||
|
@ -1668,7 +1670,7 @@ class RequestServer(object):
|
|||
readbuffer = fileobj.read(self.block_size)
|
||||
else:
|
||||
readbuffer = fileobj.read(contentlengthremaining)
|
||||
assert compat.is_bytes(readbuffer)
|
||||
assert util.is_bytes(readbuffer)
|
||||
yield readbuffer
|
||||
contentlengthremaining -= len(readbuffer)
|
||||
if len(readbuffer) == 0 or contentlengthremaining == 0:
|
||||
|
|
|
@ -7,7 +7,7 @@ Tools that make it easier to implement custom WsgiDAV providers.
|
|||
import os
|
||||
import stat
|
||||
|
||||
from wsgidav import compat, util
|
||||
from wsgidav import util
|
||||
from wsgidav.dav_provider import DAVCollection, DAVNonCollection
|
||||
|
||||
__docformat__ = "reStructuredText en"
|
||||
|
@ -28,7 +28,7 @@ class VirtualCollection(DAVCollection):
|
|||
|
||||
def __init__(self, path, environ, display_info, member_name_list):
|
||||
super(VirtualCollection, self).__init__(path, environ)
|
||||
if compat.is_basestring(display_info):
|
||||
if util.is_basestring(display_info):
|
||||
display_info = {"type": display_info}
|
||||
assert type(display_info) is dict
|
||||
assert type(member_name_list) is list
|
||||
|
@ -125,10 +125,10 @@ class VirtualTextResource(_VirtualNonCollection):
|
|||
|
||||
# def get_ref_url(self):
|
||||
# refPath = "/by_key/%s/%s" % (self._data["key"], self.name)
|
||||
# return compat.quote(self.provider.share_path + refPath)
|
||||
# return quote(self.provider.share_path + refPath)
|
||||
|
||||
def get_content(self):
|
||||
return compat.StringIO(self.content)
|
||||
return util.StringIO(self.content)
|
||||
|
||||
|
||||
# ============================================================================
|
||||
|
@ -171,7 +171,7 @@ class FileResource(_VirtualNonCollection):
|
|||
|
||||
# def get_ref_url(self):
|
||||
# refPath = "/by_key/%s/%s" % (self._data["key"], os.path.basename(self.file_path))
|
||||
# return compat.quote(self.provider.share_path + refPath)
|
||||
# return quote(self.provider.share_path + refPath)
|
||||
|
||||
def get_content(self):
|
||||
# mime = self.get_content_type()
|
||||
|
|
|
@ -71,15 +71,13 @@ Requirements:
|
|||
from here: http://mercurial.berkwood.com/
|
||||
http://mercurial.berkwood.com/binaries/mercurial-1.4.win32-py2.6.exe
|
||||
"""
|
||||
from __future__ import print_function
|
||||
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
from hashlib import md5
|
||||
from pprint import pprint
|
||||
|
||||
from wsgidav import compat, util
|
||||
from wsgidav import util
|
||||
from wsgidav.dav_error import HTTP_FORBIDDEN, DAVError
|
||||
from wsgidav.dav_provider import DAVProvider, _DAVResource
|
||||
from wsgidav.samples.dav_provider_tools import VirtualCollection
|
||||
|
@ -184,7 +182,7 @@ class HgResource(_DAVResource):
|
|||
return (
|
||||
md5(self.path).hexdigest()
|
||||
+ "-"
|
||||
+ compat.to_native(self.get_last_modified())
|
||||
+ util.to_str(self.get_last_modified())
|
||||
+ "-"
|
||||
+ str(self.get_content_length())
|
||||
)
|
||||
|
@ -200,7 +198,7 @@ class HgResource(_DAVResource):
|
|||
|
||||
def get_member_names(self):
|
||||
assert self.is_collection
|
||||
cache = self.environ["wsgidav.hg.cache"][compat.to_native(self.rev)]
|
||||
cache = self.environ["wsgidav.hg.cache"][util.to_str(self.rev)]
|
||||
dirinfos = cache["dirinfos"]
|
||||
if self.localHgPath not in dirinfos:
|
||||
return []
|
||||
|
@ -251,15 +249,15 @@ class HgResource(_DAVResource):
|
|||
return self.fctx.branch()
|
||||
elif name == "{hg:}date":
|
||||
# (secs, tz-ofs)
|
||||
return compat.to_native(self.fctx.date()[0])
|
||||
return util.to_str(self.fctx.date()[0])
|
||||
elif name == "{hg:}description":
|
||||
return self.fctx.description()
|
||||
elif name == "{hg:}filerev":
|
||||
return compat.to_native(self.fctx.filerev())
|
||||
return util.to_str(self.fctx.filerev())
|
||||
elif name == "{hg:}rev":
|
||||
return compat.to_native(self.fctx.rev())
|
||||
return util.to_str(self.fctx.rev())
|
||||
elif name == "{hg:}user":
|
||||
return compat.to_native(self.fctx.user())
|
||||
return util.to_str(self.fctx.user())
|
||||
|
||||
# Let base class implementation report live and dead properties
|
||||
return super(HgResource, self).get_property_value(name)
|
||||
|
@ -324,7 +322,7 @@ class HgResource(_DAVResource):
|
|||
"""
|
||||
assert not self.is_collection
|
||||
d = self.fctx.data()
|
||||
return compat.StringIO(d)
|
||||
return util.StringIO(d)
|
||||
|
||||
def begin_write(self, content_type=None):
|
||||
"""Open content as a stream for writing.
|
||||
|
@ -522,9 +520,9 @@ class HgResourceProvider(DAVProvider):
|
|||
}
|
||||
"""
|
||||
caches = environ.setdefault("wsgidav.hg.cache", {})
|
||||
if caches.get(compat.to_native(rev)) is not None:
|
||||
if caches.get(util.to_str(rev)) is not None:
|
||||
_logger.debug("_get_repo_info(%s): cache hit." % rev)
|
||||
return caches[compat.to_native(rev)]
|
||||
return caches[util.to_str(rev)]
|
||||
|
||||
start_time = time.time()
|
||||
self.ui.pushbuffer()
|
||||
|
@ -555,7 +553,7 @@ class HgResourceProvider(DAVProvider):
|
|||
files.sort()
|
||||
|
||||
cache = {"files": files, "dirinfos": dirinfos, "filedict": filedict}
|
||||
caches[compat.to_native(rev)] = cache
|
||||
caches[util.to_str(rev)] = cache
|
||||
_logger.info("_getRepoInfo(%s) took %.3f" % (rev, time.time() - start_time))
|
||||
return cache
|
||||
|
||||
|
@ -598,7 +596,7 @@ class HgResourceProvider(DAVProvider):
|
|||
if rest == "/":
|
||||
# Browse /archive: return a list of revision folders:
|
||||
loglist = self._get_log(limit=10)
|
||||
members = [compat.to_native(m["local_id"]) for m in loglist]
|
||||
members = [util.to_str(m["local_id"]) for m in loglist]
|
||||
return VirtualCollection(path, environ, "Revisions", members)
|
||||
revid, rest = util.pop_path(rest)
|
||||
try:
|
||||
|
|
|
@ -24,12 +24,13 @@ Valid options are (sample shows defaults)::
|
|||
}
|
||||
|
||||
"""
|
||||
from io import StringIO
|
||||
from pprint import pformat
|
||||
|
||||
import pymongo
|
||||
from bson.objectid import ObjectId
|
||||
|
||||
from wsgidav import compat, util
|
||||
from wsgidav import util
|
||||
from wsgidav.dav_provider import DAVCollection, DAVNonCollection, DAVProvider
|
||||
from wsgidav.util import join_uri
|
||||
|
||||
|
@ -88,7 +89,7 @@ class CollCollection(DAVCollection):
|
|||
def get_member_names(self):
|
||||
res = []
|
||||
for doc in self.coll.find():
|
||||
res.append(compat.to_native(doc["_id"]))
|
||||
res.append(util.to_str(doc["_id"]))
|
||||
return res
|
||||
|
||||
def get_member(self, name):
|
||||
|
@ -105,7 +106,7 @@ class DocResource(DAVNonCollection):
|
|||
|
||||
def get_content(self):
|
||||
html = "<pre>" + pformat(self.doc) + "</pre>"
|
||||
return compat.StringIO(html.encode("utf8"))
|
||||
return StringIO(html.encode("utf8"))
|
||||
|
||||
def get_content_length(self):
|
||||
return len(self.get_content().read())
|
||||
|
@ -120,8 +121,8 @@ class DocResource(DAVNonCollection):
|
|||
elif doc.get("title"):
|
||||
return doc["title"].encode("utf8")
|
||||
elif doc.get("_id"):
|
||||
return compat.to_native(doc["_id"])
|
||||
return compat.to_native(doc["key"])
|
||||
return util.to_str(doc["_id"])
|
||||
return util.to_str(doc["key"])
|
||||
|
||||
def get_display_info(self):
|
||||
return {"type": "Mongo document"}
|
||||
|
|
|
@ -57,15 +57,14 @@ its limitations:
|
|||
unnecessary queries to the database.
|
||||
|
||||
"""
|
||||
from __future__ import print_function
|
||||
|
||||
import csv
|
||||
import hashlib
|
||||
import time
|
||||
from io import StringIO
|
||||
|
||||
import MySQLdb # @UnresolvedImport
|
||||
|
||||
from wsgidav import compat, util
|
||||
from wsgidav import util
|
||||
from wsgidav.dav_error import (
|
||||
HTTP_FORBIDDEN,
|
||||
DAVError,
|
||||
|
@ -214,7 +213,7 @@ class MySQLBrowserResource(_DAVResource):
|
|||
|
||||
See DAVResource.get_content()
|
||||
"""
|
||||
filestream = compat.StringIO()
|
||||
filestream = StringIO()
|
||||
|
||||
tableName, primKey = self.provider._split_path(self.path)
|
||||
if primKey is not None:
|
||||
|
@ -476,7 +475,7 @@ class MySQLBrowserProvider(DAVProvider):
|
|||
if row is None:
|
||||
cursor.close()
|
||||
return None
|
||||
val = compat.to_native(row[field_name])
|
||||
val = util.to_str(row[field_name])
|
||||
cursor.close()
|
||||
return val
|
||||
|
||||
|
@ -527,7 +526,7 @@ class MySQLBrowserProvider(DAVProvider):
|
|||
cursor.close()
|
||||
return None
|
||||
for fname in row.keys():
|
||||
dictRet[fname] = compat.to_native(row[fname])
|
||||
dictRet[fname] = util.to_str(row[fname])
|
||||
cursor.close()
|
||||
return dictRet
|
||||
|
||||
|
@ -553,7 +552,7 @@ class MySQLBrowserProvider(DAVProvider):
|
|||
cursor.execute("SELECT " + field_name + " FROM " + self._db + "." + table_name)
|
||||
result_set = cursor.fetchall()
|
||||
for row in result_set:
|
||||
retlist.append(compat.to_native(row[field_name]))
|
||||
retlist.append(util.to_str(row[field_name]))
|
||||
cursor.close()
|
||||
return retlist
|
||||
|
||||
|
|
|
@ -97,8 +97,10 @@ When accessed using WebDAV, the following URLs both return the same resource
|
|||
"""
|
||||
import os
|
||||
import stat
|
||||
from io import BytesIO
|
||||
from urllib.parse import quote
|
||||
|
||||
from wsgidav import compat, util
|
||||
from wsgidav import util
|
||||
from wsgidav.dav_error import (
|
||||
HTTP_FORBIDDEN,
|
||||
HTTP_INTERNAL_ERROR,
|
||||
|
@ -149,7 +151,7 @@ _resourceData = [
|
|||
},
|
||||
{
|
||||
"key": "3",
|
||||
"title": u"My doc (euro:\u20AC, uuml:<3A><>)".encode("utf8"),
|
||||
"title": "My doc (euro:\u20AC, uuml:<3A><>)".encode("utf8"),
|
||||
"orga": "marketing",
|
||||
"tags": ["nice"],
|
||||
"status": "published",
|
||||
|
@ -349,7 +351,7 @@ class VirtualResource(DAVCollection):
|
|||
|
||||
def get_ref_url(self):
|
||||
refPath = "/by_key/%s" % self.data["key"]
|
||||
return compat.quote(self.provider.share_path + refPath)
|
||||
return quote(self.provider.share_path + refPath)
|
||||
|
||||
def get_property_names(self, is_allprop):
|
||||
"""Return list of supported property names in Clark Notation.
|
||||
|
@ -479,7 +481,7 @@ class VirtualArtifact(_VirtualNonCollection):
|
|||
|
||||
def get_ref_url(self):
|
||||
refPath = "/by_key/%s/%s" % (self.data["key"], self.name)
|
||||
return compat.quote(self.provider.share_path + refPath)
|
||||
return quote(self.provider.share_path + refPath)
|
||||
|
||||
def get_content(self):
|
||||
fileLinks = [
|
||||
|
@ -536,7 +538,7 @@ class VirtualArtifact(_VirtualNonCollection):
|
|||
html = self.data["description"]
|
||||
else:
|
||||
raise DAVError(HTTP_INTERNAL_ERROR, "Invalid artifact '%s'" % self.name)
|
||||
return compat.BytesIO(compat.to_bytes(html))
|
||||
return BytesIO(util.to_bytes(html))
|
||||
|
||||
|
||||
# ============================================================================
|
||||
|
@ -578,7 +580,7 @@ class VirtualResFile(_VirtualNonCollection):
|
|||
|
||||
def get_ref_url(self):
|
||||
refPath = "/by_key/%s/%s" % (self.data["key"], os.path.basename(self.file_path))
|
||||
return compat.quote(self.provider.share_path + refPath)
|
||||
return quote(self.provider.share_path + refPath)
|
||||
|
||||
def get_content(self):
|
||||
# mime = self.get_content_type()
|
||||
|
|
|
@ -53,33 +53,21 @@ can copy ``ext_wsgi_server.py`` to ``<Paste-installation>/paste/servers`` and us
|
|||
run the application by specifying ``server='ext_wsgiutils'`` in the ``server.conf`` or appropriate
|
||||
paste configuration.
|
||||
"""
|
||||
__docformat__ = "reStructuredText"
|
||||
|
||||
import logging
|
||||
import socket
|
||||
import socketserver
|
||||
import sys
|
||||
import threading
|
||||
import time
|
||||
import traceback
|
||||
from http import client as http_client
|
||||
from http import server as BaseHTTPServer
|
||||
from io import StringIO
|
||||
from urllib.parse import urlparse
|
||||
|
||||
from wsgidav import __version__, compat, util
|
||||
|
||||
__docformat__ = "reStructuredText"
|
||||
|
||||
|
||||
try:
|
||||
from http import client as http_client # py3
|
||||
except ImportError:
|
||||
import httplib as http_client
|
||||
|
||||
try:
|
||||
from http import server as BaseHTTPServer # py3
|
||||
except ImportError:
|
||||
import BaseHTTPServer
|
||||
|
||||
try:
|
||||
import socketserver # py3
|
||||
except ImportError:
|
||||
import SocketServer as socketserver
|
||||
|
||||
from wsgidav import __version__, util
|
||||
|
||||
_logger = util.get_module_logger(__name__)
|
||||
|
||||
|
@ -121,11 +109,10 @@ class ExtHandler(BaseHTTPServer.BaseHTTPRequestHandler):
|
|||
# Enable automatic keep-alive:
|
||||
protocol_version = "HTTP/1.1"
|
||||
|
||||
server_version = "WsgiDAV/{} ExtServer/{} {} Python {}".format(
|
||||
server_version = "WsgiDAV/{} ExtServer/{} {}".format(
|
||||
__version__,
|
||||
_version,
|
||||
BaseHTTPServer.BaseHTTPRequestHandler.server_version,
|
||||
util.PYTHON_VERSION,
|
||||
)
|
||||
|
||||
def log_message(self, *args):
|
||||
|
@ -140,7 +127,7 @@ class ExtHandler(BaseHTTPServer.BaseHTTPRequestHandler):
|
|||
|
||||
def getApp(self):
|
||||
# We want fragments to be returned as part of <path>
|
||||
_protocol, _host, path, _parameters, query, _fragment = compat.urlparse(
|
||||
_protocol, _host, path, _parameters, query, _fragment = urlparse(
|
||||
"http://dummyhost{}".format(self.path), allow_fragments=False
|
||||
)
|
||||
# Find any application we might have
|
||||
|
@ -204,7 +191,7 @@ class ExtHandler(BaseHTTPServer.BaseHTTPRequestHandler):
|
|||
"CONTENT_LENGTH": self.headers.get("Content-Length", ""),
|
||||
"REMOTE_ADDR": self.client_address[0],
|
||||
"SERVER_NAME": self.server.server_address[0],
|
||||
"SERVER_PORT": compat.to_native(self.server.server_address[1]),
|
||||
"SERVER_PORT": util.to_str(self.server.server_address[1]),
|
||||
"SERVER_PROTOCOL": self.request_version,
|
||||
}
|
||||
for httpHeader, httpValue in self.headers.items():
|
||||
|
@ -231,7 +218,7 @@ class ExtHandler(BaseHTTPServer.BaseHTTPRequestHandler):
|
|||
result.close()
|
||||
except Exception:
|
||||
_logger.debug("runWSGIApp caught exception...")
|
||||
errorMsg = compat.StringIO()
|
||||
errorMsg = StringIO()
|
||||
traceback.print_exc(file=errorMsg)
|
||||
logging.error(errorMsg.getvalue())
|
||||
if not self.wsgiSentHeaders:
|
||||
|
@@ -274,16 +261,16 @@ class ExtHandler(BaseHTTPServer.BaseHTTPRequestHandler):
self.end_headers()
self.wsgiSentHeaders = 1
# Send the data
# assert type(data) is str # If not, Content-Length is probably wrong!
assert type(data) is bytes # If not, Content-Length is probably wrong!
_logger.debug(
"wsgiWriteData: write {} bytes: '{!r}'...".format(
len(data), compat.to_native(data[:50])
len(data), util.to_str(data[:50])
)
)
if compat.is_unicode(data): # If not, Content-Length is probably wrong!
if util.is_str(data): # If not, Content-Length is probably wrong!
_logger.info("ext_wsgiutils_server: Got unicode data: {!r}".format(data))
# data = compat.wsgi_to_bytes(data)
data = compat.to_bytes(data)
# data = util.wsgi_to_bytes(data)
data = util.to_bytes(data)

try:
self.wfile.write(data)

@ -292,7 +279,7 @@ class ExtHandler(BaseHTTPServer.BaseHTTPRequestHandler):
|
|||
# 10053: Software caused connection abort
|
||||
# 10054: Connection reset by peer
|
||||
if e.args[0] in (10053, 10054):
|
||||
_logger.info("*** Caught socket.error: ", e, file=sys.stderr)
|
||||
_logger.info("*** Caught socket.error: %s", e, file=sys.stderr)
|
||||
else:
|
||||
raise
|
||||
|
||||
|
@ -395,7 +382,7 @@ def serve(conf, app):
|
|||
server = ExtServer((host, port), {"": app})
|
||||
server_version = ExtHandler.server_version
|
||||
if conf.get("verbose") >= 1:
|
||||
_logger.info("Running {}".format(server_version))
|
||||
_logger.info(f"Running {server_version}")
|
||||
if host in ("", "0.0.0.0"):
|
||||
(hostname, _aliaslist, ipaddrlist) = socket.gethostbyname_ex(
|
||||
socket.gethostname()
|
||||
|
@ -408,9 +395,7 @@ def serve(conf, app):
|
|||
else:
|
||||
_logger.info("Serving at {}, port {}...".format(host, port))
|
||||
server.serve_forever()
|
||||
|
||||
|
||||
# server.serve_forever_stoppable()
|
||||
# server.serve_forever_stoppable()
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
|
|
|
@ -5,8 +5,6 @@
|
|||
Wrapper for ``server_cli``, that restarts the server when source code is
|
||||
modified.
|
||||
"""
|
||||
from __future__ import print_function
|
||||
|
||||
import os
|
||||
import sys
|
||||
from subprocess import Popen
|
||||
|
|
|
@ -32,8 +32,6 @@ Configuration is defined like this:
|
|||
``--root=FOLDER`` option creates a FilesystemProvider that publishes
|
||||
FOLDER on the '/' share.
|
||||
"""
|
||||
from __future__ import print_function
|
||||
|
||||
import argparse
|
||||
import copy
|
||||
import io
|
||||
|
@ -42,6 +40,7 @@ import os
|
|||
import platform
|
||||
import sys
|
||||
import traceback
|
||||
import webbrowser
|
||||
from inspect import isfunction
|
||||
from pprint import pformat
|
||||
from threading import Timer
|
||||
|
@@ -86,6 +85,38 @@ def _get_checked_path(path, config, must_exist=True, allow_none=True):
return path


def _get_common_info(config):
"""Calculate some common info."""
# Support SSL
ssl_certificate = _get_checked_path(config.get("ssl_certificate"), config)
ssl_private_key = _get_checked_path(config.get("ssl_private_key"), config)
ssl_certificate_chain = _get_checked_path(
config.get("ssl_certificate_chain"), config
)
ssl_adapter = config.get("ssl_adapter", "builtin")
use_ssl = False
if ssl_certificate and ssl_private_key:
use_ssl = True
# _logger.info("SSL / HTTPS enabled. Adapter: {}".format(ssl_adapter))
elif ssl_certificate or ssl_private_key:
raise RuntimeError(
"Option 'ssl_certificate' and 'ssl_private_key' must be used together."
)

protocol = "https" if use_ssl else "http"
url = f"{protocol}://{config['host']}:{config['port']}"
info = {
"use_ssl": use_ssl,
"ssl_cert": ssl_certificate,
"ssl_pk": ssl_private_key,
"ssl_adapter": ssl_adapter,
"ssl_chain": ssl_certificate_chain,
"protocol": protocol,
"url": url,
}
return info
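Illustrative call of the new helper (file paths are placeholders and must exist, since `_get_checked_path()` verifies them):

```python
# Sketch only: with both certificate and key configured, the helper reports HTTPS.
config = {
    "host": "0.0.0.0",
    "port": 8443,
    "ssl_certificate": "/etc/ssl/wsgidav.crt",   # placeholder path
    "ssl_private_key": "/etc/ssl/wsgidav.key",   # placeholder path
}
info = _get_common_info(config)
# info["use_ssl"] is True, info["protocol"] == "https",
# info["url"] == "https://0.0.0.0:8443"
```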
|
||||
|
||||
class FullExpandedPath(argparse.Action):
|
||||
"""Expand user- and relative-paths"""
|
||||
|
||||
|
@ -96,7 +127,6 @@ class FullExpandedPath(argparse.Action):
|
|||
|
||||
def _init_command_line_options():
|
||||
"""Parse command line options into a dictionary."""
|
||||
|
||||
description = """\
|
||||
|
||||
Run a WEBDAV server to share file system folders.
|
||||
|
@ -126,13 +156,12 @@ See https://github.com/mar10/wsgidav for additional information.
|
|||
prog="wsgidav",
|
||||
description=description,
|
||||
epilog=epilog,
|
||||
# allow_abbrev=False, # Py3.5+
|
||||
allow_abbrev=False,
|
||||
formatter_class=argparse.RawTextHelpFormatter,
|
||||
)
|
||||
parser.add_argument(
|
||||
"-p",
|
||||
"--port",
|
||||
dest="port",
|
||||
type=int,
|
||||
# default=8080,
|
||||
help="port to serve on (default: 8080)",
|
||||
|
@ -140,7 +169,6 @@ See https://github.com/mar10/wsgidav for additional information.
|
|||
parser.add_argument(
|
||||
"-H", # '-h' conflicts with --help
|
||||
"--host",
|
||||
dest="host",
|
||||
help=(
|
||||
"host to serve from (default: localhost). 'localhost' is only "
|
||||
"accessible from the local computer. Use 0.0.0.0 to make your "
|
||||
|
@ -193,16 +221,20 @@ See https://github.com/mar10/wsgidav for additional information.
|
|||
dest="config_file",
|
||||
action=FullExpandedPath,
|
||||
help=(
|
||||
"configuration file (default: {} in current directory)".format(
|
||||
DEFAULT_CONFIG_FILES
|
||||
)
|
||||
f"configuration file (default: {DEFAULT_CONFIG_FILES} in current directory)"
|
||||
),
|
||||
)
|
||||
|
||||
qv_group.add_argument(
|
||||
"--no-config",
|
||||
action="store_true",
|
||||
dest="no_config",
|
||||
help="do not try to load default {}".format(DEFAULT_CONFIG_FILES),
|
||||
help=f"do not try to load default {DEFAULT_CONFIG_FILES}",
|
||||
)
|
||||
|
||||
parser.add_argument(
|
||||
"--browse",
|
||||
action="store_true",
|
||||
help="open browser on start",
|
||||
)
|
||||
|
||||
parser.add_argument(
|
||||
|
@ -219,7 +251,7 @@ See https://github.com/mar10/wsgidav for additional information.
|
|||
|
||||
if args.root_path and not os.path.isdir(args.root_path):
|
||||
msg = "{} is not a directory".format(args.root_path)
|
||||
raise parser.error(msg)
|
||||
parser.error(msg)
|
||||
|
||||
if args.version:
|
||||
if args.verbose >= 4:
|
||||
|
@ -266,7 +298,7 @@ See https://github.com/mar10/wsgidav for additional information.
|
|||
return cmdLineOpts, parser
|
||||
|
||||
|
||||
def _read_config_file(config_file, verbose):
|
||||
def _read_config_file(config_file, _verbose):
|
||||
"""Read configuration file options into a dictionary."""
|
||||
|
||||
config_file = os.path.abspath(config_file)
|
||||
|
@ -428,274 +460,42 @@ def _init_config():
|
|||
# import pydevd
|
||||
# pydevd.settrace()
|
||||
|
||||
return config
|
||||
return cli_opts, config
|
||||
|
||||
|
||||
def _run_paste(app, config, mode):
|
||||
"""Run WsgiDAV using paste.httpserver, if Paste is installed.
|
||||
|
||||
See http://pythonpaste.org/modules/httpserver.html for more options
|
||||
"""
|
||||
from paste import httpserver
|
||||
|
||||
version = "WsgiDAV/{} {} Python {}".format(
|
||||
__version__, httpserver.WSGIHandler.server_version, util.PYTHON_VERSION
|
||||
)
|
||||
_logger.info("Running {}...".format(version))
|
||||
|
||||
# See http://pythonpaste.org/modules/httpserver.html for more options
|
||||
server = httpserver.serve(
|
||||
app,
|
||||
host=config["host"],
|
||||
port=config["port"],
|
||||
server_version=version,
|
||||
# This option enables handling of keep-alive
|
||||
# and expect-100:
|
||||
protocol_version="HTTP/1.1",
|
||||
start_loop=False,
|
||||
)
|
||||
|
||||
if config["verbose"] >= 5:
|
||||
__handle_one_request = server.RequestHandlerClass.handle_one_request
|
||||
|
||||
def handle_one_request(self):
|
||||
__handle_one_request(self)
|
||||
if self.close_connection == 1:
|
||||
_logger.debug("HTTP Connection : close")
|
||||
else:
|
||||
_logger.debug("HTTP Connection : continue")
|
||||
|
||||
server.RequestHandlerClass.handle_one_request = handle_one_request
|
||||
|
||||
# __handle = server.RequestHandlerClass.handle
|
||||
|
||||
# def handle(self):
|
||||
# _logger.debug("open HTTP connection")
|
||||
# __handle(self)
|
||||
|
||||
server.RequestHandlerClass.handle_one_request = handle_one_request
|
||||
|
||||
host, port = server.server_address
|
||||
if host == "0.0.0.0":
|
||||
_logger.info(
|
||||
"Serving on 0.0.0.0:{} view at {}://127.0.0.1:{}".format(port, "http", port)
|
||||
)
|
||||
else:
|
||||
_logger.info("Serving on {}://{}:{}".format("http", host, port))
|
||||
try:
|
||||
server.serve_forever()
|
||||
except KeyboardInterrupt:
|
||||
_logger.warning("Caught Ctrl-C, shutting down...")
|
||||
return
|
||||
|
||||
|
||||
def _run_gevent(app, config, mode):
|
||||
"""Run WsgiDAV using gevent if gevent is installed.
|
||||
|
||||
See
|
||||
https://github.com/gevent/gevent/blob/master/src/gevent/pywsgi.py#L1356
|
||||
https://github.com/gevent/gevent/blob/master/src/gevent/server.py#L38
|
||||
for more options
|
||||
"""
|
||||
import gevent
|
||||
import gevent.monkey
|
||||
|
||||
gevent.monkey.patch_all()
|
||||
from gevent.pywsgi import WSGIServer
|
||||
|
||||
server_args = {"bind_addr": (config["host"], config["port"]), "wsgi_app": app}
|
||||
|
||||
server_name = "WsgiDAV/{} gevent/{} Python/{}".format(
|
||||
__version__, gevent.__version__, util.PYTHON_VERSION
|
||||
)
|
||||
|
||||
# Support SSL
|
||||
ssl_certificate = _get_checked_path(config.get("ssl_certificate"), config)
|
||||
ssl_private_key = _get_checked_path(config.get("ssl_private_key"), config)
|
||||
ssl_certificate_chain = _get_checked_path(
|
||||
config.get("ssl_certificate_chain"), config
|
||||
)
|
||||
|
||||
# Override or add custom args
|
||||
server_args.update(config.get("server_args", {}))
|
||||
|
||||
protocol = "http"
|
||||
if ssl_certificate:
|
||||
assert ssl_private_key
|
||||
protocol = "https"
|
||||
_logger.info("SSL / HTTPS enabled.")
|
||||
dav_server = WSGIServer(
|
||||
server_args["bind_addr"],
|
||||
app,
|
||||
keyfile=ssl_private_key,
|
||||
certfile=ssl_certificate,
|
||||
ca_certs=ssl_certificate_chain,
|
||||
)
|
||||
|
||||
else:
|
||||
dav_server = WSGIServer(server_args["bind_addr"], app)
|
||||
|
||||
# If the caller passed a startup event, monkey patch the server to set it
|
||||
# when the request handler loop is entered
|
||||
startup_event = config.get("startup_event")
|
||||
if startup_event:
|
||||
|
||||
def _patched_start():
|
||||
dav_server.start_accepting = org_start # undo the monkey patch
|
||||
org_start()
|
||||
_logger.info("gevent is ready")
|
||||
startup_event.set()
|
||||
|
||||
org_start = dav_server.start_accepting
|
||||
dav_server.start_accepting = _patched_start
|
||||
|
||||
_logger.info("Running {}".format(server_name))
|
||||
_logger.info(
|
||||
"Serving on {}://{}:{} ...".format(protocol, config["host"], config["port"])
|
||||
)
|
||||
try:
|
||||
gevent.spawn(dav_server.serve_forever())
|
||||
except KeyboardInterrupt:
|
||||
_logger.warning("Caught Ctrl-C, shutting down...")
|
||||
return
|
||||
|
||||
|
||||
def _run__cherrypy(app, config, mode):
|
||||
"""Run WsgiDAV using cherrypy.wsgiserver if CherryPy is installed."""
|
||||
assert mode == "cherrypy-wsgiserver"
|
||||
|
||||
try:
|
||||
from cherrypy import wsgiserver
|
||||
from cherrypy.wsgiserver.ssl_builtin import BuiltinSSLAdapter
|
||||
|
||||
_logger.warning("WARNING: cherrypy.wsgiserver is deprecated.")
|
||||
_logger.warning(
|
||||
" Starting with CherryPy 9.0 the functionality from cherrypy.wsgiserver"
|
||||
)
|
||||
_logger.warning(" was moved to the cheroot project.")
|
||||
_logger.warning(" Consider using --server=cheroot.")
|
||||
except ImportError:
|
||||
_logger.error("*" * 78)
|
||||
_logger.error("ERROR: Could not import cherrypy.wsgiserver.")
|
||||
_logger.error(
|
||||
"Try `pip install cherrypy` or specify another server using the --server option."
|
||||
)
|
||||
_logger.error("Note that starting with CherryPy 9.0, the server was moved to")
|
||||
_logger.error(
|
||||
"the cheroot project, so it is recommended to use `-server=cheroot`"
|
||||
)
|
||||
_logger.error("and run `pip install cheroot` instead.")
|
||||
_logger.error("*" * 78)
|
||||
raise
|
||||
|
||||
server_name = "WsgiDAV/{} {} Python/{}".format(
|
||||
__version__, wsgiserver.CherryPyWSGIServer.version, util.PYTHON_VERSION
|
||||
)
|
||||
wsgiserver.CherryPyWSGIServer.version = server_name
|
||||
|
||||
# Support SSL
|
||||
ssl_certificate = _get_checked_path(config.get("ssl_certificate"), config)
|
||||
ssl_private_key = _get_checked_path(config.get("ssl_private_key"), config)
|
||||
ssl_certificate_chain = _get_checked_path(
|
||||
config.get("ssl_certificate_chain"), config
|
||||
)
|
||||
protocol = "http"
|
||||
if ssl_certificate:
|
||||
assert ssl_private_key
|
||||
wsgiserver.CherryPyWSGIServer.ssl_adapter = BuiltinSSLAdapter(
|
||||
ssl_certificate, ssl_private_key, ssl_certificate_chain
|
||||
)
|
||||
protocol = "https"
|
||||
_logger.info("SSL / HTTPS enabled.")
|
||||
|
||||
_logger.info("Running {}".format(server_name))
|
||||
_logger.info(
|
||||
"Serving on {}://{}:{} ...".format(protocol, config["host"], config["port"])
|
||||
)
|
||||
|
||||
server_args = {
|
||||
"bind_addr": (config["host"], config["port"]),
|
||||
"wsgi_app": app,
|
||||
"server_name": server_name,
|
||||
}
|
||||
# Override or add custom args
|
||||
server_args.update(config.get("server_args", {}))
|
||||
|
||||
server = wsgiserver.CherryPyWSGIServer(**server_args)
|
||||
|
||||
# If the caller passed a startup event, monkey patch the server to set it
|
||||
# when the request handler loop is entered
|
||||
startup_event = config.get("startup_event")
|
||||
if startup_event:
|
||||
|
||||
def _patched_tick():
|
||||
server.tick = org_tick # undo the monkey patch
|
||||
org_tick()
|
||||
_logger.info("CherryPyWSGIServer is ready")
|
||||
startup_event.set()
|
||||
|
||||
org_tick = server.tick
|
||||
server.tick = _patched_tick
|
||||
|
||||
try:
|
||||
server.start()
|
||||
except KeyboardInterrupt:
|
||||
_logger.warning("Caught Ctrl-C, shutting down...")
|
||||
finally:
|
||||
server.stop()
|
||||
return
|
||||
|
||||
|
||||
def _run_cheroot(app, config, mode):
|
||||
"""Run WsgiDAV using cheroot.server if Cheroot is installed."""
|
||||
assert mode == "cheroot"
|
||||
def _run_cheroot(app, config, _server):
|
||||
"""Run WsgiDAV using cheroot.server (https://cheroot.cherrypy.dev/)."""
|
||||
try:
|
||||
from cheroot import server, wsgi
|
||||
except ImportError:
|
||||
_logger.error("*" * 78)
|
||||
_logger.error("ERROR: Could not import Cheroot.")
|
||||
_logger.error(
|
||||
"Try `pip install cheroot` or specify another server using the --server option."
|
||||
)
|
||||
_logger.error("*" * 78)
|
||||
raise
|
||||
_logger.exception("Could not import Cheroot (https://cheroot.cherrypy.dev/).")
|
||||
_logger.error("Try `pip install cheroot`.")
|
||||
return False
|
||||
|
||||
server_name = "WsgiDAV/{} {} Python/{}".format(
|
||||
__version__, wsgi.Server.version, util.PYTHON_VERSION
|
||||
)
|
||||
wsgi.Server.version = server_name
|
||||
version = wsgi.Server.version
|
||||
version = f"WsgiDAV/{__version__} {version} Python {util.PYTHON_VERSION}"
|
||||
wsgi.Server.version = version
|
||||
|
||||
info = _get_common_info(config)
|
||||
|
||||
# Support SSL
|
||||
ssl_certificate = _get_checked_path(config.get("ssl_certificate"), config)
|
||||
ssl_private_key = _get_checked_path(config.get("ssl_private_key"), config)
|
||||
ssl_certificate_chain = _get_checked_path(
|
||||
config.get("ssl_certificate_chain"), config
|
||||
)
|
||||
ssl_adapter = config.get("ssl_adapter", "builtin")
|
||||
protocol = "http"
|
||||
if ssl_certificate and ssl_private_key:
|
||||
if info["use_ssl"]:
|
||||
ssl_adapter = info["ssl_adapter"]
|
||||
ssl_adapter = server.get_ssl_adapter_class(ssl_adapter)
|
||||
wsgi.Server.ssl_adapter = ssl_adapter(
|
||||
ssl_certificate, ssl_private_key, ssl_certificate_chain
|
||||
info["ssl_cert"], info["ssl_pk"], info["ssl_chain"]
|
||||
)
|
||||
protocol = "https"
|
||||
_logger.info("SSL / HTTPS enabled. Adapter: {}".format(ssl_adapter))
|
||||
elif ssl_certificate or ssl_private_key:
|
||||
raise RuntimeError(
|
||||
"Option 'ssl_certificate' and 'ssl_private_key' must be used together."
|
||||
)
|
||||
|
||||
_logger.info("Running {}".format(server_name))
|
||||
_logger.info(
|
||||
"Serving on {}://{}:{} ...".format(protocol, config["host"], config["port"])
|
||||
)
|
||||
_logger.info(f"Running {version}")
|
||||
_logger.info(f"Serving on {info['url']} ...")
|
||||
|
||||
server_args = {
|
||||
"bind_addr": (config["host"], config["port"]),
|
||||
"wsgi_app": app,
|
||||
"server_name": server_name,
|
||||
"server_name": version,
|
||||
# File Explorer needs lot of threads (see issue #149):
|
||||
"numthreads": 50,
|
||||
"numthreads": 50, # TODO: still required?
|
||||
}
|
||||
# Override or add custom args
|
||||
server_args.update(config.get("server_args", {}))
|
||||
|
@ -715,16 +515,6 @@ def _run_cheroot(app, config, mode):
|
|||
startup_event = config.get("startup_event")
|
||||
if startup_event:
|
||||
server = PatchedServer(**server_args)
|
||||
|
||||
# issue #200: The `server.tick()` method was dropped with cheroot 8.5
|
||||
# def _patched_tick():
|
||||
# server.tick = org_tick # undo the monkey patch
|
||||
# _logger.info("wsgi.Server is ready (pre Cheroot 8.5")
|
||||
# startup_event.set()
|
||||
# org_tick()
|
||||
#
|
||||
# org_tick = server.tick
|
||||
# server.tick = _patched_tick
|
||||
else:
|
||||
server = wsgi.Server(**server_args)
|
||||
|
||||
|
@ -738,60 +528,10 @@ def _run_cheroot(app, config, mode):
|
|||
return
|
||||
|
||||
|
||||
def _run_flup(app, config, mode):
|
||||
"""Run WsgiDAV using flup.server.fcgi if Flup is installed."""
|
||||
# http://trac.saddi.com/flup/wiki/FlupServers
|
||||
if mode == "flup-fcgi":
|
||||
from flup.server.fcgi import WSGIServer
|
||||
from flup.server.fcgi import __version__ as flupver
|
||||
elif mode == "flup-fcgi-fork":
|
||||
from flup.server.fcgi_fork import WSGIServer
|
||||
from flup.server.fcgi_fork import __version__ as flupver
|
||||
else:
|
||||
raise ValueError
|
||||
|
||||
_logger.info(
|
||||
"Running WsgiDAV/{} {}/{}...".format(
|
||||
__version__, WSGIServer.__module__, flupver
|
||||
)
|
||||
)
|
||||
server = WSGIServer(
|
||||
app,
|
||||
bindAddress=(config["host"], config["port"]),
|
||||
# debug=True,
|
||||
)
|
||||
try:
|
||||
server.run()
|
||||
except KeyboardInterrupt:
|
||||
_logger.warning("Caught Ctrl-C, shutting down...")
|
||||
return
|
||||
|
||||
|
||||
def _run_wsgiref(app, config, mode):
|
||||
"""Run WsgiDAV using wsgiref.simple_server, on Python 2.5+."""
|
||||
# http://www.python.org/doc/2.5.2/lib/module-wsgiref.html
|
||||
from wsgiref.simple_server import make_server, software_version
|
||||
|
||||
version = "WsgiDAV/{} {}".format(__version__, software_version)
|
||||
_logger.info("Running {}...".format(version))
|
||||
_logger.warning(
|
||||
"WARNING: This single threaded server (wsgiref) is not meant for production."
|
||||
)
|
||||
httpd = make_server(config["host"], config["port"], app)
|
||||
try:
|
||||
httpd.serve_forever()
|
||||
except KeyboardInterrupt:
|
||||
_logger.warning("Caught Ctrl-C, shutting down...")
|
||||
return
|
||||
|
||||
|
||||
def _run_ext_wsgiutils(app, config, mode):
|
||||
def _run_ext_wsgiutils(app, config, _server):
|
||||
"""Run WsgiDAV using ext_wsgiutils_server from the wsgidav package."""
|
||||
from wsgidav.server import ext_wsgiutils_server
|
||||
|
||||
_logger.info(
|
||||
"Running WsgiDAV {} on wsgidav.ext_wsgiutils_server...".format(__version__)
|
||||
)
|
||||
_logger.warning(
|
||||
"WARNING: This single threaded server (ext-wsgiutils) is not meant for production."
|
||||
)
|
||||
|
@ -802,9 +542,112 @@ def _run_ext_wsgiutils(app, config, mode):
|
|||
return
|
||||
|
||||
|
||||
def _run_gunicorn(app, config, mode):
|
||||
"""Run WsgiDAV using gunicorn if gunicorn is installed."""
|
||||
import gunicorn.app.base
|
||||
# def _run_flup(app, config, server):
|
||||
# """Run WsgiDAV using flup.server.fcgi (http://trac.saddi.com/flup/wiki/FlupServers)."""
|
||||
# try:
|
||||
# if server == "flup-fcgi":
|
||||
# from flup.server.fcgi import WSGIServer
|
||||
# from flup.server.fcgi import __version__ as flupver
|
||||
# elif server == "flup-fcgi-fork":
|
||||
# from flup.server.fcgi_fork import WSGIServer
|
||||
# from flup.server.fcgi_fork import __version__ as flupver
|
||||
# else:
|
||||
# raise ValueError
|
||||
# except ImportError:
|
||||
# _logger.exception(f"Could not import {server} (https://gunicorn.org).")
|
||||
# _logger.error("Try `pip install flup`.")
|
||||
# return False
|
||||
|
||||
# version = f"{WSGIServer.__module__}/{flupver}"
|
||||
# version = f"WsgiDAV/{__version__} {version} Python {util.PYTHON_VERSION}"
|
||||
# _logger.info(f"Running {version} ...")
|
||||
|
||||
# server = WSGIServer(
|
||||
# app,
|
||||
# bindAddress=(config["host"], config["port"]),
|
||||
# # debug=True,
|
||||
# )
|
||||
# try:
|
||||
# server.run()
|
||||
# except KeyboardInterrupt:
|
||||
# _logger.warning("Caught Ctrl-C, shutting down...")
|
||||
# return
|
||||
|
||||
|
||||
def _run_gevent(app, config, server):
"""Run WsgiDAV using gevent, if gevent is installed (http://www.gevent.org).

See
https://github.com/gevent/gevent/blob/master/src/gevent/pywsgi.py#L1356
https://github.com/gevent/gevent/blob/master/src/gevent/server.py#L38
for more options.
"""
|
||||
try:
|
||||
import gevent
|
||||
import gevent.monkey
|
||||
from gevent.pywsgi import WSGIServer
|
||||
except ImportError:
|
||||
_logger.exception("Could not import gevent (http://www.gevent.org).")
|
||||
_logger.error("Try `pip install gevent`.")
|
||||
return False
|
||||
|
||||
gevent.monkey.patch_all()
|
||||
|
||||
info = _get_common_info(config)
|
||||
version = f"gevent/{gevent.__version__}"
|
||||
version = f"WsgiDAV/{__version__} {version} Python {util.PYTHON_VERSION}"
|
||||
|
||||
# Override or add custom args
|
||||
server_args = {
|
||||
"wsgi_app": app,
|
||||
"bind_addr": (config["host"], config["port"]),
|
||||
}
|
||||
server_args.update(config.get("server_args", {}))
|
||||
|
||||
if info["use_ssl"]:
|
||||
dav_server = WSGIServer(
|
||||
server_args["bind_addr"],
|
||||
app,
|
||||
keyfile=info["ssl_pk"],
|
||||
certfile=info["ssl_cert"],
|
||||
ca_certs=info["ssl_chain"],
|
||||
)
|
||||
else:
|
||||
dav_server = WSGIServer(server_args["bind_addr"], app)
|
||||
|
||||
# If the caller passed a startup event, monkey patch the server to set it
|
||||
# when the request handler loop is entered
|
||||
startup_event = config.get("startup_event")
|
||||
if startup_event:
|
||||
|
||||
def _patched_start():
|
||||
dav_server.start_accepting = org_start # undo the monkey patch
|
||||
org_start()
|
||||
_logger.info("gevent is ready")
|
||||
startup_event.set()
|
||||
|
||||
org_start = dav_server.start_accepting
|
||||
dav_server.start_accepting = _patched_start
|
||||
|
||||
_logger.info(f"Running {version}")
|
||||
_logger.info(f"Serving on {info['url']} ...")
|
||||
try:
|
||||
gevent.spawn(dav_server.serve_forever())
|
||||
except KeyboardInterrupt:
|
||||
_logger.warning("Caught Ctrl-C, shutting down...")
|
||||
return
|
||||
|
||||
|
||||
def _run_gunicorn(app, config, server):
|
||||
"""Run WsgiDAV using Gunicorn (https://gunicorn.org)."""
|
||||
try:
|
||||
import gunicorn.app.base
|
||||
except ImportError:
|
||||
_logger.exception("Could not import Gunicorn (https://gunicorn.org).")
|
||||
_logger.error("Try `pip install gunicorn` (UNIX only).")
|
||||
return False
|
||||
|
||||
info = _get_common_info(config)
|
||||
|
||||
class GunicornApplication(gunicorn.app.base.BaseApplication):
|
||||
def __init__(self, app, options=None):
|
||||
|
@ -824,32 +667,171 @@ def _run_gunicorn(app, config, mode):
|
|||
def load(self):
|
||||
return self.application
|
||||
|
||||
options = {
|
||||
# See https://docs.gunicorn.org/en/latest/settings.html
|
||||
server_args = {
|
||||
"bind": "{}:{}".format(config["host"], config["port"]),
|
||||
"threads": 50,
|
||||
"timeout": 1200,
|
||||
}
|
||||
GunicornApplication(app, options).run()
|
||||
if info["use_ssl"]:
|
||||
server_args.update(
|
||||
{
|
||||
"keyfile": info["ssl_pk"],
|
||||
"certfile": info["ssl_cert"],
|
||||
"ca_certs": info["ssl_chain"],
|
||||
# "ssl_version": ssl_version
|
||||
# "cert_reqs": ssl_cert_reqs
|
||||
# "ciphers": ssl_ciphers
|
||||
}
|
||||
)
|
||||
# Override or add custom args
|
||||
server_args.update(config.get("server_args", {}))
|
||||
|
||||
version = f"gunicorn/{gunicorn.__version__}"
|
||||
version = f"WsgiDAV/{__version__} {version} Python {util.PYTHON_VERSION}"
|
||||
_logger.info(f"Running {version} ...")
|
||||
|
||||
GunicornApplication(app, server_args).run()
|
||||
|
||||
|
||||
def _run_paste(app, config, server):
|
||||
"""Run WsgiDAV using paste.httpserver, if Paste is installed.
|
||||
|
||||
See http://pythonpaste.org/modules/httpserver.html for more options
|
||||
"""
|
||||
try:
|
||||
from paste import httpserver
|
||||
except ImportError:
|
||||
_logger.exception(
|
||||
"Could not import paste.httpserver (https://github.com/cdent/paste)."
|
||||
)
|
||||
_logger.error("Try `pip install paste`.")
|
||||
return False
|
||||
|
||||
info = _get_common_info(config)
|
||||
|
||||
version = httpserver.WSGIHandler.server_version
|
||||
version = f"WsgiDAV/{__version__} {version} Python {util.PYTHON_VERSION}"
|
||||
|
||||
# See http://pythonpaste.org/modules/httpserver.html for more options
|
||||
server = httpserver.serve(
|
||||
app,
|
||||
host=config["host"],
|
||||
port=config["port"],
|
||||
server_version=version,
|
||||
# This option enables handling of keep-alive and expect-100:
|
||||
protocol_version="HTTP/1.1",
|
||||
start_loop=False,
|
||||
)
|
||||
|
||||
if config["verbose"] >= 5:
|
||||
__handle_one_request = server.RequestHandlerClass.handle_one_request
|
||||
|
||||
def handle_one_request(self):
|
||||
__handle_one_request(self)
|
||||
if self.close_connection == 1:
|
||||
_logger.debug("HTTP Connection : close")
|
||||
else:
|
||||
_logger.debug("HTTP Connection : continue")
|
||||
|
||||
server.RequestHandlerClass.handle_one_request = handle_one_request
|
||||
|
||||
_logger.info(f"Running {version} ...")
|
||||
host, port = server.server_address
|
||||
if host == "0.0.0.0":
|
||||
_logger.info(f"Serving on 0.0.0.0:{port} view at http://127.0.0.1:{port}")
|
||||
else:
|
||||
_logger.info(f"Serving on {info['url']}")
|
||||
|
||||
try:
|
||||
server.serve_forever()
|
||||
except KeyboardInterrupt:
|
||||
_logger.warning("Caught Ctrl-C, shutting down...")
|
||||
return
|
||||
|
||||
|
||||
def _run_uvicorn(app, config, server):
|
||||
"""Run WsgiDAV using Uvicorn (https://www.uvicorn.org)."""
|
||||
try:
|
||||
import uvicorn
|
||||
except ImportError:
|
||||
_logger.exception("Could not import Uvicorn (https://www.uvicorn.org).")
|
||||
_logger.error("Try `pip install uvicorn`.")
|
||||
return False
|
||||
|
||||
info = _get_common_info(config)
|
||||
|
||||
# See https://www.uvicorn.org/settings/
|
||||
server_args = {
|
||||
"interface": "wsgi",
|
||||
"host": config["host"],
|
||||
"port": config["port"],
|
||||
# TODO: see _run_cheroot()
|
||||
}
|
||||
if info["use_ssl"]:
|
||||
server_args.update(
|
||||
{
|
||||
"ssl_keyfile": info["ssl_pk"],
|
||||
"ssl_certfile": info["ssl_cert"],
|
||||
"ssl_ca_certs": info["ssl_chain"],
|
||||
# "ssl_keyfile_password": ssl_keyfile_password
|
||||
# "ssl_version": ssl_version
|
||||
# "ssl_cert_reqs": ssl_cert_reqs
|
||||
# "ssl_ciphers": ssl_ciphers
|
||||
}
|
||||
)
|
||||
# Override or add custom args
|
||||
server_args.update(config.get("server_args", {}))
|
||||
|
||||
version = f"uvicorn/{uvicorn.__version__}"
|
||||
version = f"WsgiDAV/{__version__} {version} Python {util.PYTHON_VERSION}"
|
||||
_logger.info(f"Running {version} ...")
|
||||
|
||||
uvicorn.run(app, **server_args)
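For comparison, running the WSGI app under Uvicorn directly looks roughly like this. This is a sketch: the `provider_mapping` value is an assumed share path, and a real WsgiDAV config usually also carries an auth / domain-controller section that is omitted here.

```python
import uvicorn
from wsgidav.wsgidav_app import WsgiDAVApp

config = {
    "host": "127.0.0.1",
    "port": 8080,
    "provider_mapping": {"/": "/srv/share"},  # assumed share folder
}
uvicorn.run(WsgiDAVApp(config), interface="wsgi", host="127.0.0.1", port=8080)
```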
|
||||
|
||||
|
||||
def _run_wsgiref(app, config, _server):
|
||||
"""Run WsgiDAV using wsgiref.simple_server (https://docs.python.org/3/library/wsgiref.html)."""
|
||||
from wsgiref.simple_server import WSGIRequestHandler, make_server
|
||||
|
||||
version = WSGIRequestHandler.server_version
|
||||
version = f"WsgiDAV/{__version__} {version}" # Python {util.PYTHON_VERSION}"
|
||||
_logger.info(f"Running {version} ...")
|
||||
|
||||
_logger.warning(
|
||||
"WARNING: This single threaded server (wsgiref) is not meant for production."
|
||||
)
|
||||
WSGIRequestHandler.server_version = version
|
||||
httpd = make_server(config["host"], config["port"], app)
|
||||
# httpd.RequestHandlerClass.server_version = version
|
||||
try:
|
||||
httpd.serve_forever()
|
||||
except KeyboardInterrupt:
|
||||
_logger.warning("Caught Ctrl-C, shutting down...")
|
||||
return
|
||||
|
||||
|
||||
SUPPORTED_SERVERS = {
|
||||
"paste": _run_paste,
|
||||
"gevent": _run_gevent,
|
||||
"cheroot": _run_cheroot,
|
||||
"cherrypy": _run__cherrypy,
|
||||
# "cherrypy": _run__cherrypy,
|
||||
"ext-wsgiutils": _run_ext_wsgiutils,
|
||||
"flup-fcgi": _run_flup,
|
||||
"flup-fcgi_fork": _run_flup,
|
||||
"wsgiref": _run_wsgiref,
|
||||
# "flup-fcgi_fork": _run_flup,
|
||||
# "flup-fcgi": _run_flup,
|
||||
"gevent": _run_gevent,
|
||||
"gunicorn": _run_gunicorn,
|
||||
"paste": _run_paste,
|
||||
"uvicorn": _run_uvicorn,
|
||||
"wsgiref": _run_wsgiref,
|
||||
}
|
||||
|
||||
|
||||
def run():
|
||||
config = _init_config()
|
||||
cli_opts, config = _init_config()
|
||||
|
||||
util.init_logging(config)
|
||||
|
||||
info = _get_common_info(config)
|
||||
|
||||
app = WsgiDAVApp(config)
|
||||
|
||||
server = config["server"]
|
||||
|
@ -867,7 +849,19 @@ def run():
|
|||
"Consider `pip install lxml`(see https://pypi.python.org/pypi/lxml)."
|
||||
)
|
||||
|
||||
if cli_opts["browse"]:
|
||||
BROWSE_DELAY = 2.0
|
||||
|
||||
def _worker():
|
||||
url = info["url"]
|
||||
url = url.replace("0.0.0.0", "127.0.0.1")
|
||||
_logger.info(f"Starting browser on {url} ...")
|
||||
webbrowser.open(url)
|
||||
|
||||
Timer(BROWSE_DELAY, _worker).start()
|
||||
|
||||
handler(app, config, server)
|
||||
return
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
|
|
|
@ -5,8 +5,6 @@
|
|||
"""
|
||||
Simple example how to run WsgiDAV in a 3rd-party WSGI server.
|
||||
"""
|
||||
from __future__ import print_function
|
||||
|
||||
from tempfile import gettempdir
|
||||
|
||||
from wsgidav import __version__
|
||||
|
|
|
@ -16,9 +16,9 @@ consumer at the same time::
|
|||
return queue
|
||||
|
||||
"""
|
||||
from __future__ import print_function
|
||||
import queue
|
||||
|
||||
from wsgidav import compat, util
|
||||
from wsgidav import util
|
||||
|
||||
__docformat__ = "reStructuredText"
|
||||
|
||||
|
@ -49,7 +49,7 @@ class FileLikeQueue(object):
|
|||
|
||||
def __init__(self, max_size=0):
|
||||
self.is_closed = False
|
||||
self.queue = compat.queue.Queue(max_size)
|
||||
self.queue = queue.Queue(max_size)
|
||||
self.unread = ""
|
||||
|
||||
def read(self, size=0):
|
||||
|
@ -69,8 +69,8 @@ class FileLikeQueue(object):
|
|||
try:
|
||||
# Read pending data, blocking if necessary
|
||||
# (but handle the case that close() is called while waiting)
|
||||
res += compat.to_native(self.queue.get(True, 0.1))
|
||||
except compat.queue.Empty:
|
||||
res += util.to_str(self.queue.get(True, 0.1))
|
||||
except queue.Empty:
|
||||
# There was no pending data: wait for more, unless close() was called
|
||||
if self.is_closed:
|
||||
break
|
||||
|
@ -90,7 +90,7 @@ class FileLikeQueue(object):
|
|||
raise ValueError("Cannot write to closed object")
|
||||
# print("FileLikeQueue.write(), n={}".format(len(chunk)))
|
||||
# Add chunk to queue (blocks if queue is full)
|
||||
if compat.is_basestring(chunk):
|
||||
if util.is_basestring(chunk):
|
||||
self.queue.put(chunk)
|
||||
else: # if not a string, assume an iterable
|
||||
for o in chunk:
|
||||
|
|
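A hedged usage sketch for `FileLikeQueue`, assuming `read()` blocks until data arrives or `close()` is called, as the loop above suggests:

from threading import Thread

buf = FileLikeQueue(max_size=1)   # writer blocks while the reader catches up

def producer():
    buf.write(b"hello ")
    buf.write(b"world")
    buf.close()                   # signals EOF so read() can return

Thread(target=producer).start()
print(buf.read())                 # -> "hello world" (chunks decoded via util.to_str)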
109
wsgidav/util.py
|
@ -6,7 +6,9 @@
|
|||
"""
|
||||
Miscellaneous support functions for WsgiDAV.
|
||||
"""
|
||||
import base64
|
||||
import calendar
|
||||
import collections.abc
|
||||
import logging
|
||||
import mimetypes
|
||||
import os
|
||||
|
@ -18,8 +20,9 @@ import time
|
|||
from email.utils import formatdate, parsedate
|
||||
from hashlib import md5
|
||||
from pprint import pformat
|
||||
from typing import Optional
|
||||
from urllib.parse import quote
|
||||
|
||||
from wsgidav import compat
|
||||
from wsgidav.dav_error import (
|
||||
HTTP_BAD_REQUEST,
|
||||
HTTP_CREATED,
|
||||
|
@ -43,6 +46,60 @@ PYTHON_VERSION = "{}.{}.{}".format(
|
|||
sys.version_info[0], sys.version_info[1], sys.version_info[2]
|
||||
)
|
||||
|
||||
filesystemencoding = sys.getfilesystemencoding()
|
||||
|
||||
|
||||
# ========================================================================
|
||||
# String tools
|
||||
# ========================================================================
|
||||
|
||||
|
||||
def is_basestring(s):
|
||||
"""Return True for any string type (for str/unicode on Py2 and bytes/str on Py3)."""
|
||||
return isinstance(s, (str, bytes))
|
||||
|
||||
|
||||
def is_bytes(s):
|
||||
"""Return True for bytestrings (for str on Py2 and bytes on Py3)."""
|
||||
return isinstance(s, bytes)
|
||||
|
||||
|
||||
def is_str(s):
|
||||
"""Return True for native strings (for str on Py2 and Py3)."""
|
||||
return isinstance(s, str)
|
||||
|
||||
|
||||
def to_bytes(s, encoding="utf8"):
|
||||
"""Convert a text string (unicode) to bytestring (str on Py2 and bytes on Py3)."""
|
||||
if type(s) is not bytes:
|
||||
s = bytes(s, encoding)
|
||||
return s
|
||||
|
||||
|
||||
def to_str(s, encoding="utf8"):
|
||||
"""Convert data to native str type (bytestring on Py2 and unicode on Py3)."""
|
||||
if type(s) is bytes:
|
||||
s = str(s, encoding)
|
||||
elif type(s) is not str:
|
||||
s = str(s)
|
||||
return s
|
||||
|
||||
|
||||
# --- WSGI support ---
|
||||
|
||||
|
||||
def unicode_to_wsgi(u):
|
||||
"""Convert an environment variable to a WSGI 'bytes-as-unicode' string."""
|
||||
# Taken from PEP3333; the server should already have performed this, when
|
||||
# passing environ to the WSGI application
|
||||
return u.encode(filesystemencoding, "surrogateescape").decode("iso-8859-1")
|
||||
|
||||
|
||||
def wsgi_to_bytes(s):
|
||||
"""Convert a native string to a WSGI / HTTP compatible byte string."""
|
||||
# Taken from PEP3333
|
||||
return s.encode("iso-8859-1")
|
||||
|
||||
|
||||
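The helpers above replace the former `compat` shims; their behavior in a nutshell (values follow directly from the definitions):

assert to_bytes("ä") == b"\xc3\xa4"            # text -> UTF-8 bytes
assert to_str(b"abc") == "abc"                 # bytes -> str; str values pass through
assert is_basestring(b"x") and is_basestring("x") and not is_basestring(1)
assert wsgi_to_bytes("caf\xe9") == b"caf\xe9"  # native string -> latin-1 bytes (PEP 3333)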
# ========================================================================
|
||||
# Time tools
|
||||
|
@ -257,7 +314,7 @@ def get_module_logger(moduleName, defaultToVerbose=False):
|
|||
|
||||
def deep_update(d, u):
|
||||
for k, v in u.items():
|
||||
if isinstance(v, compat.collections_abc.Mapping):
|
||||
if isinstance(v, collections.abc.Mapping):
|
||||
d[k] = deep_update(d.get(k, {}), v)
|
||||
else:
|
||||
d[k] = v
|
||||
|
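A small illustration of the merge semantics above: nested mappings are merged recursively, everything else is overwritten.

d = {"opts": {"a": 1}, "x": 0}
deep_update(d, {"opts": {"b": 2}, "x": 9})
# d is now {"opts": {"a": 1, "b": 2}, "x": 9}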
@ -299,7 +356,7 @@ def dynamic_instantiate_middleware(name, args, expand=None):
|
|||
|
||||
def _expand(v):
|
||||
"""Replace some string templates with defined values."""
|
||||
if expand and compat.is_basestring(v) and v.lower() in expand:
|
||||
if expand and is_basestring(v) and v.lower() in expand:
|
||||
return expand[v]
|
||||
return v
|
||||
|
||||
|
@ -385,12 +442,12 @@ def split_namespace(clarkName):
|
|||
def to_unicode_safe(s):
|
||||
"""Convert a binary string to Unicode using UTF-8 (fallback to ISO-8859-1)."""
|
||||
try:
|
||||
u = compat.to_unicode(s, "utf8")
|
||||
u = to_str(s, "utf8")
|
||||
except ValueError:
|
||||
_logger.error(
|
||||
"to_unicode_safe({!r}) *** UTF-8 failed. Trying ISO-8859-1".format(s)
|
||||
)
|
||||
u = compat.to_unicode(s, "ISO-8859-1")
|
||||
u = to_str(s, "ISO-8859-1")
|
||||
return u
|
||||
|
||||
|
||||
|
@ -405,7 +462,7 @@ def safe_re_encode(s, encoding_to, errors="backslashreplace"):
|
|||
# prev = s
|
||||
if not encoding_to:
|
||||
encoding_to = "ASCII"
|
||||
if compat.is_bytes(s):
|
||||
if is_bytes(s):
|
||||
s = s.decode(encoding_to, errors=errors).encode(encoding_to)
|
||||
else:
|
||||
s = s.encode(encoding_to, errors=errors).decode(encoding_to)
|
||||
|
@ -415,7 +472,7 @@ def safe_re_encode(s, encoding_to, errors="backslashreplace"):
|
|||
|
||||
def string_repr(s):
|
||||
"""Return a string as hex dump."""
|
||||
if compat.is_bytes(s):
|
||||
if is_bytes(s):
|
||||
res = "{!r}: ".format(s)
|
||||
for b in s:
|
||||
if type(b) is str: # Py2
|
||||
|
@ -625,12 +682,12 @@ def join_uri(uri, *segments):
|
|||
return uri.rstrip("/") + "/" + sub
|
||||
|
||||
|
||||
def get_uri_name(uri):
|
||||
def get_uri_name(uri: str) -> str:
|
||||
"""Return local name, i.e. last segment of URI."""
|
||||
return uri.strip("/").split("/")[-1]
|
||||
|
||||
|
||||
def get_uri_parent(uri):
|
||||
def get_uri_parent(uri: str) -> Optional[str]:
|
||||
"""Return URI of parent collection with trailing '/', or None, if URI is top-level.
|
||||
|
||||
This function simply strips the last segment. It does not test, if the
|
||||
|
@ -641,7 +698,7 @@ def get_uri_parent(uri):
|
|||
return uri.rstrip("/").rsplit("/", 1)[0] + "/"
|
||||
|
||||
|
||||
def is_child_uri(parentUri, childUri):
|
||||
def is_child_uri(parentUri: str, childUri: str) -> bool:
|
||||
"""Return True, if childUri is a child of parentUri.
|
||||
|
||||
This function accounts for the fact that '/a/b/c' and 'a/b/c/' are
|
||||
|
@ -649,8 +706,8 @@ def is_child_uri(parentUri, childUri):
|
|||
Note that '/a/b/cd' is NOT a child of 'a/b/c'.
|
||||
"""
|
||||
return (
|
||||
parentUri
|
||||
and childUri
|
||||
bool(parentUri)
|
||||
and bool(childUri)
|
||||
and childUri.rstrip("/").startswith(parentUri.rstrip("/") + "/")
|
||||
)
|
||||
|
||||
|
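Worked examples for the newly annotated helpers, derived from the implementations above:

assert get_uri_name("/a/b/c/") == "c"
assert get_uri_parent("/a/b/c") == "/a/b/"
assert is_child_uri("/a/b", "/a/b/c")      # trailing slashes do not matter
assert not is_child_uri("/a/b", "/a/bc")   # shares only a prefix, not a path segment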
@ -686,10 +743,10 @@ def make_complete_url(environ, localUri=None):
|
|||
if environ["SERVER_PORT"] != "80":
|
||||
url += ":" + environ["SERVER_PORT"]
|
||||
|
||||
url += compat.quote(environ.get("SCRIPT_NAME", ""))
|
||||
url += quote(environ.get("SCRIPT_NAME", ""))
|
||||
|
||||
if localUri is None:
|
||||
url += compat.quote(environ.get("PATH_INFO", ""))
|
||||
url += quote(environ.get("PATH_INFO", ""))
|
||||
if environ.get("QUERY_STRING"):
|
||||
url += "?" + environ["QUERY_STRING"]
|
||||
else:
|
||||
|
@ -779,7 +836,7 @@ def parse_xml_body(environ, allow_empty=False):
|
|||
_logger.info(
|
||||
"{} XML request body:\n{}".format(
|
||||
environ["REQUEST_METHOD"],
|
||||
compat.to_native(xml_to_bytes(rootEL, pretty_print=True)),
|
||||
to_str(xml_to_bytes(rootEL, pretty_print=True)),
|
||||
)
|
||||
)
|
||||
environ["wsgidav.dump_request_body"] = False
|
||||
|
@ -822,9 +879,9 @@ def send_status_response(environ, start_response, e, add_headers=None, is_head=F
|
|||
|
||||
content_type, body = e.get_response_page()
|
||||
if is_head:
|
||||
body = compat.b_empty
|
||||
body = b""
|
||||
|
||||
assert compat.is_bytes(body), body # If not, Content-Length is wrong!
|
||||
assert is_bytes(body), body # If not, Content-Length is wrong!
|
||||
start_response(
|
||||
status,
|
||||
[
|
||||
|
@ -843,7 +900,7 @@ def send_multi_status_response(environ, start_response, multistatusEL):
|
|||
if environ.get("wsgidav.dump_response_body"):
|
||||
xml = "{} XML response body:\n{}".format(
|
||||
environ["REQUEST_METHOD"],
|
||||
compat.to_native(xml_to_bytes(multistatusEL, pretty_print=True)),
|
||||
to_str(xml_to_bytes(multistatusEL, pretty_print=True)),
|
||||
)
|
||||
environ["wsgidav.dump_response_body"] = xml
|
||||
|
||||
|
@ -852,7 +909,7 @@ def send_multi_status_response(environ, start_response, multistatusEL):
|
|||
# (Vista and others would accept this).
|
||||
xml_data = xml_to_bytes(multistatusEL, pretty_print=False)
|
||||
# If not, Content-Length is wrong!
|
||||
assert compat.is_bytes(xml_data), xml_data
|
||||
assert is_bytes(xml_data), xml_data
|
||||
|
||||
headers = [
|
||||
("Content-Type", "application/xml"),
|
||||
|
@ -911,7 +968,7 @@ def add_property_response(multistatusEL, href, propList):
|
|||
# log("href value:{}".format(string_repr(href)))
|
||||
# etree.SubElement(responseEL, "{DAV:}href").text = toUnicode(href)
|
||||
etree.SubElement(responseEL, "{DAV:}href").text = href
|
||||
# etree.SubElement(responseEL, "{DAV:}href").text = compat.quote(href, safe="/" + "!*'(),"
|
||||
# etree.SubElement(responseEL, "{DAV:}href").text = quote(href, safe="/" + "!*'(),"
|
||||
# + "$-_|.")
|
||||
|
||||
# One <propstat> per status code
|
||||
|
@ -940,15 +997,15 @@ def add_property_response(multistatusEL, href, propList):
|
|||
|
||||
def calc_hexdigest(s):
|
||||
"""Return md5 digest for a string."""
|
||||
s = compat.to_bytes(s)
|
||||
s = to_bytes(s)
|
||||
return md5(s).hexdigest() # return native string
|
||||
|
||||
|
||||
def calc_base64(s):
|
||||
"""Return base64 encoded binarystring."""
|
||||
s = compat.to_bytes(s)
|
||||
s = compat.base64_encodebytes(s).strip() # return bytestring
|
||||
return compat.to_native(s)
|
||||
s = to_bytes(s)
|
||||
s = base64.encodebytes(s).strip() # return bytestring
|
||||
return to_str(s)
|
||||
|
||||
|
||||
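Quick sanity examples for the two helpers above:

assert calc_hexdigest("foo") == "acbd18db4cc2f85cedef654fccc4a4d8"  # MD5 of b"foo"
assert calc_base64("foo") == "Zm9v"                                 # base64, returned as str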
def get_etag(file_path):
|
||||
|
@ -965,7 +1022,7 @@ def get_etag(file_path):
|
|||
# (At least on Vista) os.path.exists returns False, if a file name contains
|
||||
# special characters, even if it is correctly UTF-8 encoded.
|
||||
# So we convert to unicode. On the other hand, md5() needs a byte string.
|
||||
if compat.is_bytes(file_path):
|
||||
if is_bytes(file_path):
|
||||
unicodeFilePath = to_unicode_safe(file_path)
|
||||
else:
|
||||
unicodeFilePath = file_path
|
||||
|
@ -1280,7 +1337,7 @@ def test_if_header_dict(dav_res, dictIf, fullurl, locktokenlist, entitytag):
|
|||
return False
|
||||
|
||||
|
||||
test_if_header_dict.__test__ = False # Tell nose to ignore this function
|
||||
# test_if_header_dict.__test__ = False # Tell nose to ignore this function
|
||||
|
||||
|
||||
# ========================================================================
|
||||
|
|
|
@ -52,8 +52,9 @@ import inspect
|
|||
import platform
|
||||
import sys
|
||||
import time
|
||||
from urllib.parse import unquote
|
||||
|
||||
from wsgidav import __version__, compat, util
|
||||
from wsgidav import __version__, util
|
||||
from wsgidav.dav_provider import DAVProvider
|
||||
from wsgidav.default_conf import DEFAULT_CONFIG
|
||||
from wsgidav.fs_dav_provider import FilesystemProvider
|
||||
|
@ -90,6 +91,7 @@ def _check_config(config):
|
|||
# "dir_browser.enable": "middleware_stack",
|
||||
"dir_browser.ms_sharepoint_plugin": "dir_browser.ms_sharepoint_support",
|
||||
"dir_browser.ms_sharepoint_url": "dir_browser.ms_sharepoint_support",
|
||||
"dir_browser.ms_mount": "(no replacement)",
|
||||
"domain_controller": "http_authenticator.domain_controller",
|
||||
"domaincontroller": "http_authenticator.domain_controller",
|
||||
"emulate_win32_lastmod": "hotfix.emulate_win32_lastmod",
|
||||
|
@ -138,7 +140,7 @@ class WsgiDAVApp(object):
|
|||
|
||||
self.re_encode_path_info = hotfixes.get("re_encode_path_info", None)
|
||||
if self.re_encode_path_info is None:
|
||||
self.re_encode_path_info = compat.PY3
|
||||
self.re_encode_path_info = True
|
||||
|
||||
self.unquote_path_info = hotfixes.get("unquote_path_info", False)
|
||||
|
||||
|
@ -195,7 +197,7 @@ class WsgiDAVApp(object):
|
|||
# The middleware stack configuration may contain plain strings, dicts,
|
||||
# classes, or objects
|
||||
app = None
|
||||
if compat.is_basestring(mw):
|
||||
if util.is_basestring(mw):
|
||||
# If a plain string is passed, try to import it, assuming
|
||||
# `BaseMiddleware` signature
|
||||
app_class = dynamic_import_class(mw)
|
||||
|
@ -234,8 +236,6 @@ class WsgiDAVApp(object):
|
|||
else:
|
||||
_logger.error("Could not add middleware {}.".format(mw))
|
||||
|
||||
domain_controller
|
||||
# Print info
|
||||
_logger.info(
|
||||
"WsgiDAV/{} Python/{} {}".format(
|
||||
__version__, util.PYTHON_VERSION, platform.platform(aliased=True)
|
||||
|
@ -294,7 +294,7 @@ class WsgiDAVApp(object):
|
|||
share = "/" + share.strip("/")
|
||||
assert share not in self.provider_map
|
||||
|
||||
if compat.is_basestring(provider):
|
||||
if util.is_basestring(provider):
|
||||
# Syntax:
|
||||
# <mount_path>: <folder_path>
|
||||
# We allow a simple string as 'provider'. In this case we interpret
|
||||
|
@ -381,18 +381,18 @@ class WsgiDAVApp(object):
|
|||
# example.
|
||||
# This is done by default for Python 3, but can be turned off in settings.
|
||||
if self.re_encode_path_info:
|
||||
path = environ["PATH_INFO"] = compat.wsgi_to_bytes(path).decode()
|
||||
path = environ["PATH_INFO"] = util.wsgi_to_bytes(path).decode()
|
||||
|
||||
# We optionally unquote PATH_INFO here, although this should already be
|
||||
# done by the server (#8).
|
||||
if self.unquote_path_info:
|
||||
path = compat.unquote(environ["PATH_INFO"])
|
||||
path = unquote(environ["PATH_INFO"])
|
||||
|
||||
# GC issue 22: Pylons sends root as u'/'
|
||||
if not compat.is_native(path):
|
||||
if not util.is_str(path):
|
||||
_logger.warning("Got non-native PATH_INFO: {!r}".format(path))
|
||||
# path = path.encode("utf8")
|
||||
path = compat.to_native(path)
|
||||
path = util.to_str(path)
|
||||
|
||||
# Always adding these values to environ:
|
||||
environ["wsgidav.config"] = self.config
|
||||
|
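To illustrate the re-encode step above: a WSGI server hands PATH_INFO over as latin-1 "bytes-as-unicode" text (PEP 3333), and `wsgi_to_bytes()` plus `.decode()` recovers the intended UTF-8 path. The sample path is illustrative:

raw = "/d\xc3\xa4v"                        # how '/däv' arrives in environ per PEP 3333
fixed = util.wsgi_to_bytes(raw).decode()   # latin-1 bytes, re-decoded as UTF-8
assert fixed == "/däv"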
@ -433,7 +433,7 @@ class WsgiDAVApp(object):
|
|||
environ["PATH_INFO"] = path[len(share) :]
|
||||
|
||||
# assert isinstance(path, str)
|
||||
assert compat.is_native(path)
|
||||
assert util.is_str(path)
|
||||
# See http://mail.python.org/pipermail/web-sig/2007-January/002475.html
|
||||
# for some clarification about SCRIPT_NAME/PATH_INFO format
|
||||
# SCRIPT_NAME starts with '/' or is empty
|
||||
|
|
|
@ -6,11 +6,8 @@
|
|||
"""
|
||||
Small wrapper for different etree packages.
|
||||
"""
|
||||
# from __future__ import print_function
|
||||
|
||||
import logging
|
||||
|
||||
from wsgidav import compat
|
||||
from io import StringIO
|
||||
|
||||
__docformat__ = "reStructuredText"
|
||||
|
||||
|
@ -76,8 +73,11 @@ def xml_to_bytes(element, pretty_print=False):
|
|||
"""Wrapper for etree.tostring, that takes care of unsupported pretty_print
|
||||
option and prepends an encoding header."""
|
||||
if use_lxml:
|
||||
xml = etree.tostring(
|
||||
element, encoding="UTF-8", xml_declaration=True, pretty_print=pretty_print
|
||||
xml = etree.tostring( # pylint: disable=unexpected-keyword-arg
|
||||
element,
|
||||
encoding="UTF-8",
|
||||
xml_declaration=True,
|
||||
pretty_print=pretty_print,
|
||||
)
|
||||
else:
|
||||
xml = etree.tostring(element, encoding="UTF-8")
|
||||
|
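For context, a minimal call of the wrapper above, assuming lxml is installed so the pretty_print branch is taken:

from lxml import etree

root = etree.Element("{DAV:}multistatus")
xml_bytes = xml_to_bytes(root, pretty_print=True)
# -> bytes starting with an XML declaration, e.g. b'<?xml version=...'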
@ -118,7 +118,7 @@ def element_content_as_string(element):
|
|||
"""
|
||||
if len(element) == 0:
|
||||
return element.text or ""  # Make sure None is returned as ''
|
||||
stream = compat.StringIO()
|
||||
stream = StringIO()
|
||||
for childnode in element:
|
||||
stream.write(xml_to_bytes(childnode, pretty_print=False) + "\n")
|
||||
# print(xml_to_bytes(childnode, pretty_print=False), file=stream)
|
||||
|
|
|
@ -22,8 +22,7 @@ tasks:
|
|||
github: true # GitHub repo name valid and online accessible
|
||||
clean: true # Repo must/must not contain modifications
|
||||
os: null # (str, list)
|
||||
# Build with Python 3.8 [issue #216](https://github.com/mar10/wsgidav/issues/216)
|
||||
python: "==3.8" # SemVer specifier
|
||||
python: ">=3.9" # SemVer specifier
|
||||
twine: true # `twine` is available
|
||||
up_to_date: true # everything pulled from remote
|
||||
venv: true # running inside a virtual environment
|
||||
|
@ -31,7 +30,7 @@ tasks:
|
|||
|
||||
# 'exec': Run arbitrary shell command
|
||||
- task: exec
|
||||
args: ["tox", "-e", "lint-py3,lint-py27,check"] # shell command and optional arguments
|
||||
args: ["tox", "-e", "lint,check"] # shell command and optional arguments
|
||||
always: true # `true`: run even in dry-run mode
|
||||
# silent: true # `true`: suppress output
|
||||
stream: true
|
||||
|
|