init files
This commit is contained in:
commit
1d42d6c03e
|
@ -0,0 +1,5 @@
|
||||||
|
runtime/
|
||||||
|
dist/
|
||||||
|
build/
|
||||||
|
__pycache__/
|
||||||
|
*.log
|
|
@ -0,0 +1,8 @@
|
||||||
|
# Default ignored files
|
||||||
|
/shelf/
|
||||||
|
/workspace.xml
|
||||||
|
# Datasource local storage ignored files
|
||||||
|
/dataSources/
|
||||||
|
/dataSources.local.xml
|
||||||
|
# Editor-based HTTP Client requests
|
||||||
|
/httpRequests/
|
|
@ -0,0 +1,14 @@
|
||||||
|
<?xml version="1.0" encoding="UTF-8"?>
|
||||||
|
<project version="4">
|
||||||
|
<component name="PublishConfigData" remoteFilesAllowedToDisappearOnAutoupload="false">
|
||||||
|
<serverData>
|
||||||
|
<paths name="hyu3@linuxsrv01.ece.uw.edu:22">
|
||||||
|
<serverdata>
|
||||||
|
<mappings>
|
||||||
|
<mapping local="$PROJECT_DIR$" web="/" />
|
||||||
|
</mappings>
|
||||||
|
</serverdata>
|
||||||
|
</paths>
|
||||||
|
</serverData>
|
||||||
|
</component>
|
||||||
|
</project>
|
|
@ -0,0 +1,12 @@
|
||||||
|
<component name="InspectionProjectProfileManager">
|
||||||
|
<profile version="1.0">
|
||||||
|
<option name="myName" value="Project Default" />
|
||||||
|
<inspection_tool class="PyPep8NamingInspection" enabled="true" level="WEAK WARNING" enabled_by_default="true">
|
||||||
|
<option name="ignoredErrors">
|
||||||
|
<list>
|
||||||
|
<option value="N806" />
|
||||||
|
</list>
|
||||||
|
</option>
|
||||||
|
</inspection_tool>
|
||||||
|
</profile>
|
||||||
|
</component>
|
|
@ -0,0 +1,6 @@
|
||||||
|
<?xml version="1.0" encoding="UTF-8"?>
|
||||||
|
<project version="4">
|
||||||
|
<component name="ProjectRootManager" version="2" languageLevel="JDK_16" project-jdk-name="Python 3.9 (sshcec)" project-jdk-type="Python SDK">
|
||||||
|
<output url="file://$PROJECT_DIR$/out" />
|
||||||
|
</component>
|
||||||
|
</project>
|
|
@ -0,0 +1,8 @@
|
||||||
|
<?xml version="1.0" encoding="UTF-8"?>
|
||||||
|
<project version="4">
|
||||||
|
<component name="ProjectModuleManager">
|
||||||
|
<modules>
|
||||||
|
<module fileurl="file://$PROJECT_DIR$/sshcec.iml" filepath="$PROJECT_DIR$/sshcec.iml" />
|
||||||
|
</modules>
|
||||||
|
</component>
|
||||||
|
</project>
|
Binary file not shown.
Binary file not shown.
|
@ -0,0 +1,34 @@
|
||||||
|
# -*- mode: python ; coding: utf-8 -*-
|
||||||
|
|
||||||
|
|
||||||
|
block_cipher = None
|
||||||
|
|
||||||
|
|
||||||
|
a = Analysis(['toplevel.py', 'get_runtime.py'],
|
||||||
|
pathex=['D:\\eyhc\\sshcec'],
|
||||||
|
binaries=[],
|
||||||
|
datas=[],
|
||||||
|
hiddenimports=[],
|
||||||
|
hookspath=[],
|
||||||
|
runtime_hooks=[],
|
||||||
|
excludes=[],
|
||||||
|
win_no_prefer_redirects=False,
|
||||||
|
win_private_assemblies=False,
|
||||||
|
cipher=block_cipher,
|
||||||
|
noarchive=False)
|
||||||
|
pyz = PYZ(a.pure, a.zipped_data,
|
||||||
|
cipher=block_cipher)
|
||||||
|
exe = EXE(pyz,
|
||||||
|
a.scripts,
|
||||||
|
a.binaries,
|
||||||
|
a.zipfiles,
|
||||||
|
a.datas,
|
||||||
|
[],
|
||||||
|
name='CTOS-wrapper',
|
||||||
|
debug=False,
|
||||||
|
bootloader_ignore_signals=False,
|
||||||
|
strip=False,
|
||||||
|
upx=True,
|
||||||
|
upx_exclude=[],
|
||||||
|
runtime_tmpdir=None,
|
||||||
|
console=False )
|
|
@ -0,0 +1,13 @@
|
||||||
|
import os
from getpass import getpass

import paramiko

# SECURITY FIX: the NetID and password used to be hard-coded on this line
# (they remain in version-control history and should be rotated). Read them
# from the environment, falling back to an interactive prompt.
_host = os.environ.get("SSHCEC_HOST", "linux-lab-055.ece.uw.edu")
_user = os.environ.get("SSHCEC_USER") or input("UW NetID: ")
_password = os.environ.get("SSHCEC_PASSWORD") or getpass("SSH password: ")

transport = paramiko.Transport((_host, 22))
try:
    transport.connect(None, _user, _password)

    # Go!
    sftp = paramiko.SFTPClient.from_transport(transport)
    sftp.chdir("Documents")
    print(sftp.listdir())
    # sftp.put('VNC_UWEE.jar', 'VNC_UWEE.jar')
    # sftp.get('concentration.csv','concentration.csv')
finally:
    # FIX: the transport was never closed; release the TCP connection and
    # paramiko's worker thread even when an error occurs above.
    transport.close()
|
|
@ -0,0 +1,2 @@
|
||||||
|
# Emit the quoted hostnames linux-lab-041 ... linux-lab-080, space separated
# (handy for pasting into the ui.py hostChoices list).
_hosts = (f'"linux-lab-0{n}.ece.uw.edu"' for n in range(41, 81))
print(" ".join(_hosts), end=" ")
|
|
@ -0,0 +1,64 @@
|
||||||
|
import os, platform, zipfile, tarfile, urllib.request
|
||||||
|
|
||||||
|
JAVA_VER = '8'
|
||||||
|
RUNTIME_PATH = os.path.join(os.getcwd(), 'runtime')
|
||||||
|
|
||||||
|
|
||||||
|
def get_platform_ext() -> tuple:
    """Return ``(corretto_platform_tag, archive_extension)`` for this machine.

    The tag/extension pair plugs into the Amazon Corretto download URL.

    Raises:
        ValueError: on any platform other than Windows or 64-bit Linux/macOS.
    """
    system = platform.system()
    is_64bit = platform.architecture()[0] == "64bit"

    if system == "Windows":
        # 32-bit Windows is still supported; everything else must be 64-bit.
        return ("x64-windows", "zip") if is_64bit else ("x86-windows", "zip")
    if system == "Linux" and is_64bit:
        return "x64-linux", "tar.gz"
    if system == "Darwin" and is_64bit:
        return "macos", "tar.gz"
    raise ValueError("Unsupported system. Currently only supports windows and 64-bit linux/MacOS systems!")
|
||||||
|
|
||||||
|
|
||||||
|
# def get_download_url(version: str, arch_os: str, file_ext: str, jv_type: str = "jdk") -> str:
|
||||||
|
# if not jv_type == "jdk" or not jv_type == "jre":
|
||||||
|
# raise ValueError(f'{jv_type} is not an acceptable type. Accepted types are either "jdk" or "jre"')
|
||||||
|
# if version == '1.8':
|
||||||
|
# version = '8'
|
||||||
|
# return f"https://corretto.aws/downloads/latest/amazon-corretto-{version}-{arch_os}-{jv_type}.{file_ext}"
|
||||||
|
|
||||||
|
|
||||||
|
def install_runtime(jv_ver: str = JAVA_VER) -> str:
    """Download and unpack an Amazon Corretto JDK into ``RUNTIME_PATH``.

    :param jv_ver: major Java version to fetch (defaults to ``JAVA_VER``).
    :return: name of the top-level directory of the unpacked JDK.
    :raises ValueError: from get_platform_ext() on unsupported platforms.
    :acknowledge: https://stackoverflow.com/a/7244263
    """
    archive = 'Runtime.cash'  # temporary download target in the CWD
    os_arch, ext = get_platform_ext()
    url = f"https://corretto.aws/downloads/latest/amazon-corretto-{jv_ver}-{os_arch}-jdk.{ext}"
    print(url)
    try:
        urllib.request.urlretrieve(url, archive)
        if ext == "zip":
            with zipfile.ZipFile(archive) as z:
                # first archive entry is the JDK's root folder, e.g. "amazon-corretto-8.x.y/"
                jv_name = z.namelist()[0].replace('/', '')
                z.extractall(path=RUNTIME_PATH)
        else:  # non-windows, which we assume is the tar.gz file
            with tarfile.open(archive, 'r:gz') as t:
                jv_name = t.getnames()[0].replace('/', '')
                # NOTE(review): extractall trusts the archive's member paths;
                # fine for the official Corretto mirror, but consider
                # filter="data" on Python 3.12+.
                t.extractall(path=RUNTIME_PATH)
        return jv_name
    finally:
        # BUGFIX: if the download itself failed, no archive file exists and
        # the previous unconditional os.unlink raised FileNotFoundError,
        # masking the original exception. Only delete what was created.
        if os.path.exists(archive):
            os.unlink(archive)
|
||||||
|
|
||||||
|
|
||||||
|
def get_java_path(jv_ver: str = JAVA_VER, directory: str = RUNTIME_PATH) -> str:
    """Locate (or install, then locate) the ``java`` executable for *jv_ver*.

    :param jv_ver: Java major version ('8' is normalized to the legacy '1.8'
                   naming Corretto 8 directories use).
    :param directory: runtime install root to search.
    :return: absolute-ish path to the java executable under a .../bin dir.
    """
    # BUGFIX: "java.exe" only exists on Windows; other platforms ship "java".
    exe = "java.exe" if platform.system() == "Windows" else "java"
    if not os.path.exists(directory):
        return os.path.join(directory, install_runtime(jv_ver), "bin", exe)
    if jv_ver == '8':
        # Corretto 8 directories are named with the legacy "1.8" scheme.
        jv_ver = '1.8'
    # BUGFIX: search the *directory* argument (RUNTIME_PATH was hard-coded,
    # silently ignoring the parameter) and use os.sep instead of the
    # Windows-only '\\bin' literal, which never matched on Linux/macOS.
    bin_marker = os.sep + "bin"
    jv_path = [root for root, d, f in os.walk(directory, topdown=False)
               if jv_ver in root and bin_marker in root]
    if jv_path:
        return os.path.join(jv_path[0], exe)
    return os.path.join(directory, install_runtime(jv_ver), "bin", exe)
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == '__main__':
    # Manual smoke test: resolve (downloading the JDK if necessary) and
    # print the java executable path.
    print(get_java_path())
|
|
@ -0,0 +1,73 @@
|
||||||
|
import paramiko
|
||||||
|
import re
|
||||||
|
|
||||||
|
|
||||||
|
class ShellHandler:
    """Persistent interactive remote shell over paramiko.

    Unlike ``SSHClient.exec_command``, the single invoked shell keeps state
    (e.g. ``cd``) between successive ``execute`` calls.
    """

    def __init__(self, host, user, psw):
        self.ssh = paramiko.SSHClient()
        self.ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        self.ssh.connect(host, username=user, password=psw, port=22)

        channel = self.ssh.invoke_shell()
        # file-like wrappers around the one shell channel
        self.stdin = channel.makefile('wb')
        self.stdout = channel.makefile('r')

    def __del__(self):
        # best-effort cleanup when the handler is garbage-collected
        self.ssh.close()

    def execute(self, cmd):
        """Run *cmd* in the remote shell and collect its output.

        :param cmd: the command to be executed on the remote computer
        :return: ``(stdout_lines, stderr_lines, exit_status)`` — stderr is
                 only populated on failure, since the pty merges the streams.
        :examples: execute('ls')
                   execute('finger')
                   execute('cd folder_name')
        """
        cmd = cmd.strip('\n')
        self.stdin.write(cmd + '\n')
        # Echo a sentinel (plus $?) after the command so we can tell where
        # its output ends and recover the exit status.
        finish = 'end of stdOUT buffer. finished with exit status'
        echo_cmd = 'echo {} $?'.format(finish)
        self.stdin.write(echo_cmd + '\n')
        self.stdin.flush()

        shout = []
        sherr = []
        exit_status = 0
        for line in self.stdout:
            if str(line).startswith(cmd) or str(line).startswith(echo_cmd):
                # up for now filled with shell junk from stdin
                shout = []
            elif str(line).startswith(finish):
                # our finish command ends with the exit status
                exit_status = int(str(line).rsplit(maxsplit=1)[1])
                if exit_status:
                    # stderr is combined with stdout.
                    # thus, swap sherr with shout in a case of failure.
                    sherr = shout
                    shout = []
                break
            else:
                # get rid of 'coloring and formatting' special characters
                shout.append(re.compile(r'(\x9B|\x1B\[)[0-?]*[ -/]*[@-~]').sub('', line).
                             replace('\b', '').replace('\r', ''))
        # BUGFIX: the collected output and exit status used to be computed
        # and then silently discarded; return them so callers can use them.
        # (Also removed the unused local `shin = self.stdin`.)
        return shout, sherr, exit_status
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == '__main__':
    # SECURITY FIX: host/NetID/password used to be hard-coded here (they
    # remain in VCS history — rotate them). Prompt instead, and only run
    # the demo when executed as a script rather than on import.
    from getpass import getpass

    s = ShellHandler(input("host: "), input("UW NetID: "), getpass("SSH password: "))
    s.execute("vncserver -list")
|
||||||
|
|
||||||
|
|
||||||
|
# ssh = paramiko.SSHClient()
|
||||||
|
# ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
|
||||||
|
# # linux-lab-055.ece.uw.edu
|
||||||
|
# ssh.connect("linux-lab-055.ece.uw.edu", username="hyu3", password=":Eric200002182919")
|
||||||
|
# ssh_stdin, ssh_stdout, ssh_stderr = ssh.exec_command("vncserver -list")
|
||||||
|
#
|
||||||
|
# xdisplay = ssh_stdout.readlines()[-1]
|
||||||
|
#
|
||||||
|
# if "DISPLAY" in xdisplay:
|
||||||
|
# ssh_stdin, ssh_stdout, ssh_stderr = ssh.exec_command('vncserver -interface 0.0.0.0 && vncserver -list')
|
||||||
|
# xdisplay = ssh_stdout.readlines()[-1]
|
||||||
|
#
|
||||||
|
# print(xdisplay.split()[0][1:])
|
|
@ -0,0 +1,96 @@
|
||||||
|
# modified from https://github.com/paramiko/paramiko/blob/main/demos/interactive.py
|
||||||
|
|
||||||
|
import socket
|
||||||
|
import sys
|
||||||
|
from paramiko.py3compat import u
|
||||||
|
|
||||||
|
# windows does not have termios...
|
||||||
|
try:
|
||||||
|
import termios
|
||||||
|
import tty
|
||||||
|
|
||||||
|
has_termios = True
|
||||||
|
except ImportError:
|
||||||
|
has_termios = False
|
||||||
|
|
||||||
|
|
||||||
|
def interactive_shell(chan):
    """Bridge channel *chan* to the local terminal with the right backend.

    POSIX systems (termios available) get the raw-tty select() loop;
    Windows falls back to the threaded line-buffered emulation.
    """
    backend = posix_shell if has_termios else windows_shell
    backend(chan)
|
||||||
|
|
||||||
|
|
||||||
|
def posix_shell(chan):
    """Pump bytes between the local terminal (in raw mode) and *chan*.

    Puts the controlling tty into raw/cbreak mode, then multiplexes the
    paramiko channel and stdin with select() until the channel hits EOF.
    The original tty modes are restored on exit.
    """
    import select

    # remember current terminal modes so they can be restored in `finally`
    oldtty = termios.tcgetattr(sys.stdin)
    try:
        tty.setraw(sys.stdin.fileno())
        tty.setcbreak(sys.stdin.fileno())
        chan.settimeout(0.0)  # non-blocking recv; select() gates the reads

        while True:
            r, w, e = select.select([chan, sys.stdin], [], [])
            if chan in r:
                try:
                    x = u(chan.recv(1024))
                    if len(x) == 0:
                        # remote side closed the channel
                        sys.stdout.write("\r\n*** EOF\r\n")
                        break
                    sys.stdout.write(x)
                    sys.stdout.flush()
                except socket.timeout:
                    pass
            if sys.stdin in r:
                # forward one keystroke at a time to the remote side
                x = sys.stdin.read(1)
                if len(x) == 0:
                    break
                chan.send(x)

    finally:
        termios.tcsetattr(sys.stdin, termios.TCSADRAIN, oldtty)
|
||||||
|
|
||||||
|
|
||||||
|
# thanks to Mike Looijmans for this code
def windows_shell(chan):
    """Line-buffered bridge between local stdio and *chan* for platforms
    without termios (Windows).

    A background thread copies channel output to stdout and mirrors the
    latest chunk into the module-global ``cmdout``; the main loop forwards
    keystrokes and sends ``exit`` once a shell prompt marker ("]$") shows up.
    """
    import threading

    sys.stdout.write(
        "Line-buffered terminal emulation. Press F6 or ^Z to send EOF.\r\n\r\n"
    )
    global cmdout
    # BUGFIX: cmdout is read in the loop below before the writer thread has
    # necessarily received any data; without this initialisation the
    # `"]$" in cmdout` test raised NameError on a slow connection.
    cmdout = ""

    def writeall(sock):
        # NOTE(review): cmdout is shared across threads without a lock;
        # tolerable here since only whole-string assignments/reads occur.
        global cmdout
        line = 0
        while True:
            data = sock.recv(256)
            if not data:
                sys.exit(0)
            cmdout = data.decode("utf-8")
            if "]$" in cmdout:
                line += 1
                if line > 1:
                    sys.stdout.write("press ENTER to continue\n")
            sys.stdout.write(cmdout)
            sys.stdout.flush()

    writer = threading.Thread(target=writeall, args=(chan,))
    writer.start()

    try:
        # NOTE(review): hard-coded per-user remote command — presumably a
        # development shortcut; should be parameterised before release.
        chan.send("/homes/hyu3/anaconda3/envs/Documents/bin/python /homes/hyu3/Documents/dummy.py\n")
        # chan.send("vncpasswd\n")
        while True:
            if "]$" in cmdout:
                chan.send("exit\n")
                break
            d = sys.stdin.read(1)
            if not d:
                break
            chan.send(d)
    except (EOFError, OSError):
        # user hit ^Z or F6
        pass
|
||||||
|
|
|
@ -0,0 +1,27 @@
|
||||||
|
import cryptography.fernet
|
||||||
|
import argon2
|
||||||
|
import base64
|
||||||
|
import random
|
||||||
|
|
||||||
|
# def encrypt_data(data_bytes, password, salt):
|
||||||
|
# password_hash = argon2.argon2_hash(password=password, salt=salt)
|
||||||
|
# encoded_hash = base64.urlsafe_b64encode(password_hash[:32])
|
||||||
|
# encryptor = cryptography.fernet.Fernet(encoded_hash)
|
||||||
|
# return encryptor.encrypt(data_bytes)
|
||||||
|
#
|
||||||
|
#
|
||||||
|
# def decrypt_data(cipher_bytes, password, salt):
|
||||||
|
# password_hash = argon2.argon2_hash(password=password, salt=salt)
|
||||||
|
# encoded_hash = base64.urlsafe_b64encode(password_hash[:32])
|
||||||
|
# decryptor = cryptography.fernet.Fernet(encoded_hash)
|
||||||
|
# return decryptor.decrypt(cipher_bytes)
|
||||||
|
|
||||||
|
import bcrypt
from getpass import getpass

master_secret_key = getpass('tell me the master secret key you are going to use')
salt = bcrypt.gensalt()
# BUGFIX: getpass() returns str; concatenating it to bytes raised TypeError.
# Encode the key before joining it with the byte strings.
# (Also removed a stray bare `bcrypt` expression at the end of the file,
# which evaluated the module object and did nothing.)
combo_password = "1234'".encode('utf-8') + salt + master_secret_key.encode('utf-8')
hashed_password = bcrypt.hashpw(combo_password, salt)
|
|
@ -0,0 +1,11 @@
|
||||||
|
<?xml version="1.0" encoding="UTF-8"?>
|
||||||
|
<module type="PYTHON_MODULE" version="4">
|
||||||
|
<component name="NewModuleRootManager" inherit-compiler-output="true">
|
||||||
|
<exclude-output />
|
||||||
|
<content url="file://$MODULE_DIR$">
|
||||||
|
<excludeFolder url="file://$MODULE_DIR$/venv" />
|
||||||
|
</content>
|
||||||
|
<orderEntry type="inheritedJdk" />
|
||||||
|
<orderEntry type="sourceFolder" forTests="false" />
|
||||||
|
</component>
|
||||||
|
</module>
|
|
@ -0,0 +1,22 @@
|
||||||
|
import paramiko, os, subprocess, interactive
from getpass import getpass

# SECURITY FIX: host/NetID/password used to be hard-coded on the connect()
# line below (they remain in VCS history and should be rotated). Take them
# from the environment or prompt interactively instead.
_host = os.environ.get("SSHCEC_HOST", "linux-lab-055.ece.uw.edu")
_user = os.environ.get("SSHCEC_USER") or input("UW NetID: ")
_password = os.environ.get("SSHCEC_PASSWORD") or getpass("SSH password: ")

sshClient = paramiko.SSHClient()
sshClient.set_missing_host_key_policy(paramiko.AutoAddPolicy())
sshClient.connect(_host, username=_user, password=_password)

# ssh_stdin, ssh_stdout, ssh_stderr = sshClient.exec_command("/homes/hyu3/anaconda3/envs/Documents/bin/python /homes/hyu3/Documents/dummy.py")
# print(ssh_stdout.readlines())
#
# ssh_stdin, ssh_stdout, ssh_stderr = sshClient.exec_command("a")
# print(ssh_stdout.readlines())
#
# ssh_stdin, ssh_stdout, ssh_stderr = sshClient.exec_command("b")
# print(ssh_stdout.readlines())

# open a pty-backed shell and hand it to the interactive bridge
channel = sshClient.get_transport().open_session()
channel.get_pty()
channel.invoke_shell()
interactive.interactive_shell(channel)
sshClient.close()
|
@ -0,0 +1,124 @@
|
||||||
|
import paramiko, os, sys, subprocess, get_runtime, wx # , keyring
|
||||||
|
from typing import NoReturn
|
||||||
|
from ui import Frame
|
||||||
|
from threading import Thread
|
||||||
|
|
||||||
|
|
||||||
|
def main():
    """Read the form, start (or reuse) a remote VNC session, and launch the
    bundled Java VNC viewer; optionally kill the remote session afterwards.

    Runs on a worker thread (see run()); reads input from and reports
    progress through the module-level `frame`.
    """
    host = frame.host.GetValue()
    localhost = frame.localhost.GetValue()
    netid = frame.net_id.GetValue()
    sshPass = frame.password.GetValue()
    password = frame.vnc_passwd.GetValue()
    kill_when_exit = frame.kill_when_exit.GetValue()

    if not host or not localhost or not netid or not sshPass or not password:
        # FIX: corrected the grammar of the user-facing error message.
        wx.MessageBox("At least one of the boxes is left empty. Please make sure you fill out ALL boxes!!",
                      "Empty Input", wx.ICON_ERROR)
        return
    frame.run.Enable(False)
    ssh = paramiko.SSHClient()
    sys.stdout.write(f'connecting to {host}\n')
    frame.status.SetLabelText('connecting to host')
    port = get_server(ssh, host, netid, sshPass, password)
    if int(port) == -1:  # sentinel from get_server: SSH authentication failed
        wx.MessageBox("Your netID and password for that netID does not match the one in the ECE system!", "Wrong "
                      "Credentials", wx.ICON_ERROR)
        frame.run.Enable(True)
        frame.status.SetLabelText(u"Click 'RUN' to begin, hover over any boxes to show hints")
        return
    display = port  # keep the raw X display number for kill_srv later
    # VNC listens on TCP 5900 + display, e.g. display 3 -> port "5903"
    if int(port) < 10:
        port = "0" + port
    port = "59" + port
    java = os.path.join(get_runtime.RUNTIME_PATH, get_runtime.get_java_path())
    frame.status.SetLabelText('initializing java')
    # SECURITY NOTE(review): passing the SSH/VNC passwords on the command
    # line exposes them in the local process list — worth moving to stdin
    # or an environment variable if the viewer supports it.
    vnc_cmd = f'{java} -jar VNC_UWEE.jar -host="{localhost}" -port="{port}" -sshHost="{host}" -sshUser="{netid}" -sshPassword="{sshPass}" -password="{password}"'
    frame.status.SetLabelText('VNC session started!')
    out = subprocess.Popen(vnc_cmd, shell=True, stdout=subprocess.PIPE, stdin=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout, stderr = out.communicate()
    # BUGFIX: the log file handle was never closed; use a context manager.
    with open("launcher.log", "w") as log:
        log.write(stderr.decode('utf-8'))
    if out.returncode:
        sys.stderr.write(f"{stderr.decode('utf-8')}\nProgram crashed with code {out.returncode}")

    if kill_when_exit:
        kill_srv(ssh, host, netid, sshPass, display)
    frame.run.Enable(True)
    frame.status.SetLabelText(u"Click 'RUN' to begin, hover over any boxes to show hints")
|
||||||
|
|
||||||
|
|
||||||
|
def get_server(client: paramiko.SSHClient, host: str, username: str, password: str, vnc_passwd: str) -> str:
    """Connect *client* to *host* and make sure a VNC session is running.

    Returns the X display number as a string (without the leading ':'),
    or '-1' when SSH authentication fails. On a first-ever run the VNC
    password is initialised to *vnc_passwd* via vncpasswd.
    """
    client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    try:
        client.connect(host, username=username, password=password)
    except paramiko.ssh_exception.AuthenticationException:
        return '-1'
    sys.stdout.write("connected, sending command\n")

    def _display_of(stdout):
        # last line of `vncserver -list` looks like ":3 <pid>" -> "3"
        return stdout.readlines()[-1].split()[0][1:]

    ssh_stdin, ssh_stdout, ssh_stderr = client.exec_command("vncserver && vncserver -list")
    ssh_stdin.channel.shutdown_write()

    if "You will require a password to access your desktops" not in ssh_stderr.read().decode('utf-8'):
        return _display_of(ssh_stdout)

    # First use on this account: set the vncserver password (entered twice,
    # no view-only password), then start the session again — ssh_stdout does
    # not refresh outside this branch.
    # TODO: add option that user can have a view-only password
    ssh_stdin, ssh_stdout, ssh_stderr = client.exec_command("vncpasswd")
    ssh_stdin.write(f"{vnc_passwd}\n")
    ssh_stdin.write(f"{vnc_passwd}\n")
    ssh_stdin.write("n\n")
    ssh_stdin.flush()
    ssh_stdin, ssh_stdout, ssh_stderr = client.exec_command("vncserver && vncserver -list")
    return _display_of(ssh_stdout)
|
||||||
|
|
||||||
|
|
||||||
|
def kill_srv(client: paramiko.SSHClient, host: str, username: str, password: str, port: str) -> None:
    """Reconnect to *host* and kill the VNC session on display *port*.

    :param port: the X display number (as returned by get_server),
                 not the 59xx TCP port.
    """
    # FIX: the return annotation was `NoReturn`, which means "never returns
    # normally" (always raises/exits); this function returns, so it is None.
    client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    client.connect(host, username=username, password=password)
    client.exec_command(f"vncserver -kill :{port}")
|
||||||
|
|
||||||
|
|
||||||
|
def on_chk(e):
    """Checkbox handler: lock the Local-IP field to 127.0.0.1 while the
    'Use localhost' box is ticked; otherwise allow free editing."""
    use_localhost = frame.localhost_ck.GetValue()
    frame.localhost.Enable(not use_localhost)
    if use_localhost:
        frame.localhost.SetValue("127.0.0.1")
|
||||||
|
|
||||||
|
|
||||||
|
def run(e):
    """Button handler: run main() on a daemon worker thread so the wx event
    loop (and therefore the window) stays responsive."""
    # FIX: Thread.setDaemon() is deprecated; pass daemon= at construction.
    task = Thread(target=main, daemon=True)
    task.start()
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == '__main__':
    # GUI bootstrap: build the frame, prepare an on-demand output window for
    # crash details, wire up the event handlers, and enter the wx main loop.
    console_title = 'Program Crashed! Here are the details'  # this is there for now to redirect any errors
    app = wx.App(useBestVisual=True)
    frame = Frame(None)
    app.SetTopWindow(frame)
    # set where the uncaught errors should be displayed
    console = wx.PyOnDemandOutputWindow(console_title)
    console.SetParent(frame)
    # sys.stderr = console
    frame.SetTitle("ECE-SSH_dev_1.1")
    frame.run.Bind(wx.EVT_BUTTON, run)
    frame.Bind(wx.EVT_CHECKBOX, on_chk)
    frame.Show()
    app.MainLoop()
|
||||||
|
|
||||||
|
# MAGIC_USERNAME_KEY = 'im_the_magic_username_key'
|
||||||
|
#
|
||||||
|
# # the service is just a namespace for your app
|
||||||
|
# service_id = 'IM_YOUR_APP!'
|
||||||
|
#
|
||||||
|
# username = 'dustin'
|
||||||
|
#
|
||||||
|
# # save password
|
||||||
|
# keyring.set_password(service_id, username, "password")
|
||||||
|
#
|
||||||
|
# # optionally, abuse `set_password` to save username onto keyring
|
||||||
|
# # we're just using some known magic string in the username field
|
||||||
|
# keyring.set_password(service_id, MAGIC_USERNAME_KEY, username)
|
||||||
|
# # username = keyring.get_password(service_id, MAGIC_USERNAME_KEY)
|
||||||
|
# # password = keyring.get_password(service_id, username)
|
||||||
|
# # print(username, password)
|
|
@ -0,0 +1,127 @@
|
||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
|
||||||
|
###########################################################################
|
||||||
|
## Python code generated with wxFormBuilder (version Jun 17 2015)
|
||||||
|
## http://www.wxformbuilder.org/
|
||||||
|
##
|
||||||
|
## PLEASE DO "NOT" EDIT THIS FILE!
|
||||||
|
###########################################################################
|
||||||
|
|
||||||
|
import wx
|
||||||
|
import wx.xrc
|
||||||
|
|
||||||
|
###########################################################################
|
||||||
|
## Class Frame
|
||||||
|
###########################################################################
|
||||||
|
|
||||||
|
class Frame ( wx.Frame ):
    """Main application window.

    Generated by wxFormBuilder (see the DO-NOT-EDIT banner above) — prefer
    regenerating from the .fbp project over hand edits. Exposes the widgets
    that toplevel.py reads/binds: net_id, password, vnc_passwd, localhost,
    localhost_ck, host, kill_when_exit, run and status.
    """

    def __init__( self, parent ):
        wx.Frame.__init__ ( self, parent, id = wx.ID_ANY, title = wx.EmptyString, pos = wx.DefaultPosition, size = wx.Size( 343,295 ), style = wx.DEFAULT_FRAME_STYLE )

        self.SetSizeHintsSz( wx.Size( 300,274 ), wx.DefaultSize )
        self.SetForegroundColour( wx.SystemSettings.GetColour( wx.SYS_COLOUR_BTNTEXT ) )
        self.SetBackgroundColour( wx.SystemSettings.GetColour( wx.SYS_COLOUR_WINDOW ) )

        bSizer1 = wx.BoxSizer( wx.VERTICAL )

        bSizer1.SetMinSize( wx.Size( 372,284 ) )
        # credentials grid: NetID / UW password / VNC password rows
        id_field = wx.FlexGridSizer( 3, 2, 0, 0 )
        id_field.SetFlexibleDirection( wx.BOTH )
        id_field.SetNonFlexibleGrowMode( wx.FLEX_GROWMODE_ALL )

        self.netio_prompt = wx.StaticText( self, wx.ID_ANY, u"UW Net-ID", wx.DefaultPosition, wx.DefaultSize, 0 )
        self.netio_prompt.Wrap( -1 )
        id_field.Add( self.netio_prompt, 0, wx.ALIGN_RIGHT|wx.ALL, 5 )

        self.net_id = wx.TextCtrl( self, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.DefaultSize, 0|wx.FULL_REPAINT_ON_RESIZE|wx.WANTS_CHARS )
        self.net_id.SetToolTipString( u"Your UW Net ID, WITHOUT @uw.edu" )

        id_field.Add( self.net_id, 0, wx.ALL|wx.ALIGN_CENTER_HORIZONTAL|wx.EXPAND, 5 )

        self.password_promot = wx.StaticText( self, wx.ID_ANY, u"Password", wx.DefaultPosition, wx.DefaultSize, 0 )
        self.password_promot.Wrap( -1 )
        id_field.Add( self.password_promot, 0, wx.ALL|wx.ALIGN_RIGHT, 5 )

        self.password = wx.TextCtrl( self, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.DefaultSize, wx.TE_PASSWORD|wx.FULL_REPAINT_ON_RESIZE )
        self.password.SetToolTipString( u"The password you use to login MyUW/MyPlan/Canvas and stuff at UW" )

        id_field.Add( self.password, 0, wx.ALL, 5 )

        self.vncPassword_prompt1 = wx.StaticText( self, wx.ID_ANY, u"VNC Password", wx.DefaultPosition, wx.DefaultSize, 0 )
        self.vncPassword_prompt1.Wrap( -1 )
        id_field.Add( self.vncPassword_prompt1, 0, wx.ALL|wx.ALIGN_RIGHT, 5 )

        self.vnc_passwd = wx.TextCtrl( self, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.DefaultSize, wx.TE_PASSWORD|wx.FULL_REPAINT_ON_RESIZE )
        self.vnc_passwd.SetToolTipString( u"Password you set for your VNC session. If you forgot or don't know or your first time using the ECE remote linux, just type in a password you would like to set/reset" )

        id_field.Add( self.vnc_passwd, 0, wx.ALL, 5 )


        bSizer1.Add( id_field, 1, wx.ALIGN_CENTER_HORIZONTAL, 0 )

        # local-IP row with its "Use localhost" override checkbox
        inputArea = wx.GridBagSizer( 0, 0 )
        inputArea.SetFlexibleDirection( wx.BOTH )
        inputArea.SetNonFlexibleGrowMode( wx.FLEX_GROWMODE_SPECIFIED )

        self.localhos_prompt = wx.StaticText( self, wx.ID_ANY, u"Local IP", wx.DefaultPosition, wx.DefaultSize, 0 )
        self.localhos_prompt.Wrap( -1 )
        inputArea.Add( self.localhos_prompt, wx.GBPosition( 4, 0 ), wx.GBSpan( 1, 1 ), wx.ALL|wx.ALIGN_RIGHT|wx.ALIGN_CENTER_VERTICAL, 5 )

        self.localhost = wx.TextCtrl( self, wx.ID_ANY, u"127.0.0.1", wx.DefaultPosition, wx.DefaultSize, 0 )
        self.localhost.Enable( False )
        self.localhost.SetToolTipString( u"ip address on your computer that the linux server's data will sent to. Normally you won't need to worry about this one" )

        inputArea.Add( self.localhost, wx.GBPosition( 4, 1 ), wx.GBSpan( 1, 1 ), wx.ALL|wx.ALIGN_CENTER_VERTICAL, 5 )

        self.localhost_ck = wx.CheckBox( self, wx.ID_ANY, u"Use localhost", wx.DefaultPosition, wx.DefaultSize, 0 )
        self.localhost_ck.SetValue(True)
        inputArea.Add( self.localhost_ck, wx.GBPosition( 3, 1 ), wx.GBSpan( 1, 1 ), wx.ALIGN_CENTER_VERTICAL, 5 )


        bSizer1.Add( inputArea, 0, wx.ALIGN_CENTER|wx.ALL, 5 )

        # server picker row
        gbSizer6 = wx.GridBagSizer( 0, 0 )
        gbSizer6.SetFlexibleDirection( wx.BOTH )
        gbSizer6.SetNonFlexibleGrowMode( wx.FLEX_GROWMODE_SPECIFIED )

        self.host_prompt = wx.StaticText( self, wx.ID_ANY, u"Server", wx.DefaultPosition, wx.DefaultSize, 0 )
        self.host_prompt.Wrap( -1 )
        gbSizer6.Add( self.host_prompt, wx.GBPosition( 0, 0 ), wx.GBSpan( 1, 1 ), wx.ALL, 5 )

        # NOTE(review): this generated list repeats linux-lab-078/079 at the
        # end and skips 053/068 — confirm against the lab inventory.
        hostChoices = [ u"linuxsrv01.ece.uw.edu", u"linux-lab-041.ece.uw.edu", u"linux-lab-042.ece.uw.edu", u"linux-lab-043.ece.uw.edu", u"linux-lab-044.ece.uw.edu", u"linux-lab-045.ece.uw.edu", u"linux-lab-046.ece.uw.edu", u"linux-lab-047.ece.uw.edu", u"linux-lab-048.ece.uw.edu", u"linux-lab-049.ece.uw.edu", u"linux-lab-050.ece.uw.edu", u"linux-lab-051.ece.uw.edu", u"linux-lab-052.ece.uw.edu", u"linux-lab-054.ece.uw.edu", u"linux-lab-055.ece.uw.edu", u"linux-lab-056.ece.uw.edu", u"linux-lab-057.ece.uw.edu", u"linux-lab-058.ece.uw.edu", u"linux-lab-059.ece.uw.edu", u"linux-lab-060.ece.uw.edu", u"linux-lab-061.ece.uw.edu", u"linux-lab-062.ece.uw.edu", u"linux-lab-063.ece.uw.edu", u"linux-lab-064.ece.uw.edu", u"linux-lab-065.ece.uw.edu", u"linux-lab-066.ece.uw.edu", u"linux-lab-067.ece.uw.edu", u"linux-lab-069.ece.uw.edu", u"linux-lab-070.ece.uw.edu", u"linux-lab-071.ece.uw.edu", u"linux-lab-072.ece.uw.edu", u"linux-lab-073.ece.uw.edu", u"linux-lab-074.ece.uw.edu", u"linux-lab-075.ece.uw.edu", u"linux-lab-076.ece.uw.edu", u"linux-lab-077.ece.uw.edu", u"linux-lab-078.ece.uw.edu", u"linux-lab-079.ece.uw.edu", u"linux-lab-080.ece.uw.edu", u"linux-lab-078.ece.uw.edu", u"linux-lab-079.ece.uw.edu" ]
        self.host = wx.ComboBox( self, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.DefaultSize, hostChoices, 0 )
        self.host.SetSelection( 0 )
        gbSizer6.Add( self.host, wx.GBPosition( 0, 1 ), wx.GBSpan( 1, 1 ), wx.ALL, 5 )


        bSizer1.Add( gbSizer6, 0, wx.ALIGN_CENTER_HORIZONTAL, 5 )

        self.run = wx.Button( self, wx.ID_ANY, u"RUN", wx.DefaultPosition, wx.DefaultSize, 0 )
        bSizer1.Add( self.run, 0, wx.ALIGN_CENTER_HORIZONTAL, 5 )

        # bottom strip: kill-on-exit checkbox plus the status line
        gbSizer8 = wx.GridBagSizer( 0, 0 )
        gbSizer8.SetFlexibleDirection( wx.BOTH )
        gbSizer8.SetNonFlexibleGrowMode( wx.FLEX_GROWMODE_SPECIFIED )

        self.kill_when_exit = wx.CheckBox( self, wx.ID_ANY, u"\"kill\" process when exit", wx.DefaultPosition, wx.DefaultSize, 0 )
        self.kill_when_exit.SetValue(True)
        gbSizer8.Add( self.kill_when_exit, wx.GBPosition( 0, 0 ), wx.GBSpan( 1, 1 ), wx.ALL|wx.ALIGN_CENTER_HORIZONTAL, 5 )

        self.status = wx.StaticText( self, wx.ID_ANY, u"Click 'RUN' to begin, hover over any boxes to show hints", wx.DefaultPosition, wx.DefaultSize, wx.ALIGN_CENTRE )
        self.status.Wrap( -1 )
        gbSizer8.Add( self.status, wx.GBPosition( 1, 0 ), wx.GBSpan( 1, 1 ), wx.ALIGN_CENTER_HORIZONTAL|wx.ALIGN_CENTER_VERTICAL, 5 )


        bSizer1.Add( gbSizer8, 0, wx.ALIGN_CENTER_HORIZONTAL, 5 )


        self.SetSizer( bSizer1 )
        self.Layout()

        self.Centre( wx.BOTH )

    def __del__( self ):
        pass
|
||||||
|
|
||||||
|
|
|
@ -0,0 +1,222 @@
|
||||||
|
# don't import any costly modules
|
||||||
|
import sys
|
||||||
|
import os
|
||||||
|
|
||||||
|
|
||||||
|
is_pypy = '__pypy__' in sys.builtin_module_names
|
||||||
|
|
||||||
|
|
||||||
|
def warn_distutils_present():
    """Emit a warning when stdlib ``distutils`` was imported before this
    hook, since setuptools will replace it in ``sys.modules``."""
    if 'distutils' not in sys.modules:
        return
    if is_pypy and sys.version_info < (3, 7):
        # PyPy for 3.6 unconditionally imports distutils, so bypass the warning
        # https://foss.heptapod.net/pypy/pypy/-/blob/be829135bc0d758997b3566062999ee8b23872b4/lib-python/3/site.py#L250
        return
    import warnings

    message = (
        "Distutils was imported before Setuptools, but importing Setuptools "
        "also replaces the `distutils` module in `sys.modules`. This may lead "
        "to undesirable behaviors or errors. To avoid these issues, avoid "
        "using distutils directly, ensure that setuptools is installed in the "
        "traditional way (e.g. not an editable install), and/or make sure "
        "that setuptools is always imported before distutils."
    )
    warnings.warn(message)
|
||||||
|
|
||||||
|
|
||||||
|
def clear_distutils():
    """Remove ``distutils`` and every ``distutils.*`` submodule from
    ``sys.modules`` (no-op when distutils was never imported)."""
    if 'distutils' not in sys.modules:
        return
    import warnings

    warnings.warn("Setuptools is replacing distutils.")
    doomed = [
        mod_name
        for mod_name in sys.modules
        if mod_name == "distutils" or mod_name.startswith("distutils.")
    ]
    for mod_name in doomed:
        del sys.modules[mod_name]
|
||||||
|
|
||||||
|
|
||||||
|
def enabled():
    """
    Allow selection of distutils by environment variable.
    """
    return os.environ.get('SETUPTOOLS_USE_DISTUTILS', 'local') == 'local'
|
||||||
|
|
||||||
|
|
||||||
|
def ensure_local_distutils():
    """Force ``distutils`` to resolve to setuptools' vendored copy.

    Clears any already-imported stdlib distutils, re-imports it while the
    DistutilsMetaFinder is installed (via shim()), and sanity-checks that
    the vendored copy is the one that actually loaded.
    """
    import importlib

    clear_distutils()

    # With the DistutilsMetaFinder in place,
    # perform an import to cause distutils to be
    # loaded from setuptools._distutils. Ref #2906.
    with shim():
        importlib.import_module('distutils')

    # check that submodules load as expected
    core = importlib.import_module('distutils.core')
    assert '_distutils' in core.__file__, core.__file__
    assert 'setuptools._distutils.log' not in sys.modules
|
||||||
|
|
||||||
|
|
||||||
|
def do_override():
|
||||||
|
"""
|
||||||
|
Ensure that the local copy of distutils is preferred over stdlib.
|
||||||
|
|
||||||
|
See https://github.com/pypa/setuptools/issues/417#issuecomment-392298401
|
||||||
|
for more motivation.
|
||||||
|
"""
|
||||||
|
if enabled():
|
||||||
|
warn_distutils_present()
|
||||||
|
ensure_local_distutils()
|
||||||
|
|
||||||
|
|
||||||
|
class _TrivialRe:
|
||||||
|
def __init__(self, *patterns):
|
||||||
|
self._patterns = patterns
|
||||||
|
|
||||||
|
def match(self, string):
|
||||||
|
return all(pat in string for pat in self._patterns)
|
||||||
|
|
||||||
|
|
||||||
|
class DistutilsMetaFinder:
|
||||||
|
def find_spec(self, fullname, path, target=None):
|
||||||
|
# optimization: only consider top level modules and those
|
||||||
|
# found in the CPython test suite.
|
||||||
|
if path is not None and not fullname.startswith('test.'):
|
||||||
|
return
|
||||||
|
|
||||||
|
method_name = 'spec_for_{fullname}'.format(**locals())
|
||||||
|
method = getattr(self, method_name, lambda: None)
|
||||||
|
return method()
|
||||||
|
|
||||||
|
def spec_for_distutils(self):
|
||||||
|
if self.is_cpython():
|
||||||
|
return
|
||||||
|
|
||||||
|
import importlib
|
||||||
|
import importlib.abc
|
||||||
|
import importlib.util
|
||||||
|
|
||||||
|
try:
|
||||||
|
mod = importlib.import_module('setuptools._distutils')
|
||||||
|
except Exception:
|
||||||
|
# There are a couple of cases where setuptools._distutils
|
||||||
|
# may not be present:
|
||||||
|
# - An older Setuptools without a local distutils is
|
||||||
|
# taking precedence. Ref #2957.
|
||||||
|
# - Path manipulation during sitecustomize removes
|
||||||
|
# setuptools from the path but only after the hook
|
||||||
|
# has been loaded. Ref #2980.
|
||||||
|
# In either case, fall back to stdlib behavior.
|
||||||
|
return
|
||||||
|
|
||||||
|
class DistutilsLoader(importlib.abc.Loader):
|
||||||
|
def create_module(self, spec):
|
||||||
|
mod.__name__ = 'distutils'
|
||||||
|
return mod
|
||||||
|
|
||||||
|
def exec_module(self, module):
|
||||||
|
pass
|
||||||
|
|
||||||
|
return importlib.util.spec_from_loader(
|
||||||
|
'distutils', DistutilsLoader(), origin=mod.__file__
|
||||||
|
)
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def is_cpython():
|
||||||
|
"""
|
||||||
|
Suppress supplying distutils for CPython (build and tests).
|
||||||
|
Ref #2965 and #3007.
|
||||||
|
"""
|
||||||
|
return os.path.isfile('pybuilddir.txt')
|
||||||
|
|
||||||
|
def spec_for_pip(self):
|
||||||
|
"""
|
||||||
|
Ensure stdlib distutils when running under pip.
|
||||||
|
See pypa/pip#8761 for rationale.
|
||||||
|
"""
|
||||||
|
if self.pip_imported_during_build():
|
||||||
|
return
|
||||||
|
clear_distutils()
|
||||||
|
self.spec_for_distutils = lambda: None
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def pip_imported_during_build(cls):
|
||||||
|
"""
|
||||||
|
Detect if pip is being imported in a build script. Ref #2355.
|
||||||
|
"""
|
||||||
|
import traceback
|
||||||
|
|
||||||
|
return any(
|
||||||
|
cls.frame_file_is_setup(frame) for frame, line in traceback.walk_stack(None)
|
||||||
|
)
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def frame_file_is_setup(frame):
|
||||||
|
"""
|
||||||
|
Return True if the indicated frame suggests a setup.py file.
|
||||||
|
"""
|
||||||
|
# some frames may not have __file__ (#2940)
|
||||||
|
return frame.f_globals.get('__file__', '').endswith('setup.py')
|
||||||
|
|
||||||
|
def spec_for_sensitive_tests(self):
|
||||||
|
"""
|
||||||
|
Ensure stdlib distutils when running select tests under CPython.
|
||||||
|
|
||||||
|
python/cpython#91169
|
||||||
|
"""
|
||||||
|
clear_distutils()
|
||||||
|
self.spec_for_distutils = lambda: None
|
||||||
|
|
||||||
|
sensitive_tests = (
|
||||||
|
[
|
||||||
|
'test.test_distutils',
|
||||||
|
'test.test_peg_generator',
|
||||||
|
'test.test_importlib',
|
||||||
|
]
|
||||||
|
if sys.version_info < (3, 10)
|
||||||
|
else [
|
||||||
|
'test.test_distutils',
|
||||||
|
]
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
for name in DistutilsMetaFinder.sensitive_tests:
|
||||||
|
setattr(
|
||||||
|
DistutilsMetaFinder,
|
||||||
|
f'spec_for_{name}',
|
||||||
|
DistutilsMetaFinder.spec_for_sensitive_tests,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
DISTUTILS_FINDER = DistutilsMetaFinder()
|
||||||
|
|
||||||
|
|
||||||
|
def add_shim():
|
||||||
|
DISTUTILS_FINDER in sys.meta_path or insert_shim()
|
||||||
|
|
||||||
|
|
||||||
|
class shim:
|
||||||
|
def __enter__(self):
|
||||||
|
insert_shim()
|
||||||
|
|
||||||
|
def __exit__(self, exc, value, tb):
|
||||||
|
remove_shim()
|
||||||
|
|
||||||
|
|
||||||
|
def insert_shim():
|
||||||
|
sys.meta_path.insert(0, DISTUTILS_FINDER)
|
||||||
|
|
||||||
|
|
||||||
|
def remove_shim():
|
||||||
|
try:
|
||||||
|
sys.meta_path.remove(DISTUTILS_FINDER)
|
||||||
|
except ValueError:
|
||||||
|
pass
|
|
@ -0,0 +1 @@
|
||||||
|
__import__('_distutils_hack').do_override()
|
|
@ -0,0 +1,14 @@
|
||||||
|
__all__ = ['FFI', 'VerificationError', 'VerificationMissing', 'CDefError',
|
||||||
|
'FFIError']
|
||||||
|
|
||||||
|
from .api import FFI
|
||||||
|
from .error import CDefError, FFIError, VerificationError, VerificationMissing
|
||||||
|
from .error import PkgConfigError
|
||||||
|
|
||||||
|
__version__ = "1.15.1"
|
||||||
|
__version_info__ = (1, 15, 1)
|
||||||
|
|
||||||
|
# The verifier module file names are based on the CRC32 of a string that
|
||||||
|
# contains the following version number. It may be older than __version__
|
||||||
|
# if nothing is clearly incompatible.
|
||||||
|
__version_verifier_modules__ = "0.8.6"
|
|
@ -0,0 +1,965 @@
|
||||||
|
import sys, types
|
||||||
|
from .lock import allocate_lock
|
||||||
|
from .error import CDefError
|
||||||
|
from . import model
|
||||||
|
|
||||||
|
try:
|
||||||
|
callable
|
||||||
|
except NameError:
|
||||||
|
# Python 3.1
|
||||||
|
from collections import Callable
|
||||||
|
callable = lambda x: isinstance(x, Callable)
|
||||||
|
|
||||||
|
try:
|
||||||
|
basestring
|
||||||
|
except NameError:
|
||||||
|
# Python 3.x
|
||||||
|
basestring = str
|
||||||
|
|
||||||
|
_unspecified = object()
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
class FFI(object):
|
||||||
|
r'''
|
||||||
|
The main top-level class that you instantiate once, or once per module.
|
||||||
|
|
||||||
|
Example usage:
|
||||||
|
|
||||||
|
ffi = FFI()
|
||||||
|
ffi.cdef("""
|
||||||
|
int printf(const char *, ...);
|
||||||
|
""")
|
||||||
|
|
||||||
|
C = ffi.dlopen(None) # standard library
|
||||||
|
-or-
|
||||||
|
C = ffi.verify() # use a C compiler: verify the decl above is right
|
||||||
|
|
||||||
|
C.printf("hello, %s!\n", ffi.new("char[]", "world"))
|
||||||
|
'''
|
||||||
|
|
||||||
|
def __init__(self, backend=None):
|
||||||
|
"""Create an FFI instance. The 'backend' argument is used to
|
||||||
|
select a non-default backend, mostly for tests.
|
||||||
|
"""
|
||||||
|
if backend is None:
|
||||||
|
# You need PyPy (>= 2.0 beta), or a CPython (>= 2.6) with
|
||||||
|
# _cffi_backend.so compiled.
|
||||||
|
import _cffi_backend as backend
|
||||||
|
from . import __version__
|
||||||
|
if backend.__version__ != __version__:
|
||||||
|
# bad version! Try to be as explicit as possible.
|
||||||
|
if hasattr(backend, '__file__'):
|
||||||
|
# CPython
|
||||||
|
raise Exception("Version mismatch: this is the 'cffi' package version %s, located in %r. When we import the top-level '_cffi_backend' extension module, we get version %s, located in %r. The two versions should be equal; check your installation." % (
|
||||||
|
__version__, __file__,
|
||||||
|
backend.__version__, backend.__file__))
|
||||||
|
else:
|
||||||
|
# PyPy
|
||||||
|
raise Exception("Version mismatch: this is the 'cffi' package version %s, located in %r. This interpreter comes with a built-in '_cffi_backend' module, which is version %s. The two versions should be equal; check your installation." % (
|
||||||
|
__version__, __file__, backend.__version__))
|
||||||
|
# (If you insist you can also try to pass the option
|
||||||
|
# 'backend=backend_ctypes.CTypesBackend()', but don't
|
||||||
|
# rely on it! It's probably not going to work well.)
|
||||||
|
|
||||||
|
from . import cparser
|
||||||
|
self._backend = backend
|
||||||
|
self._lock = allocate_lock()
|
||||||
|
self._parser = cparser.Parser()
|
||||||
|
self._cached_btypes = {}
|
||||||
|
self._parsed_types = types.ModuleType('parsed_types').__dict__
|
||||||
|
self._new_types = types.ModuleType('new_types').__dict__
|
||||||
|
self._function_caches = []
|
||||||
|
self._libraries = []
|
||||||
|
self._cdefsources = []
|
||||||
|
self._included_ffis = []
|
||||||
|
self._windows_unicode = None
|
||||||
|
self._init_once_cache = {}
|
||||||
|
self._cdef_version = None
|
||||||
|
self._embedding = None
|
||||||
|
self._typecache = model.get_typecache(backend)
|
||||||
|
if hasattr(backend, 'set_ffi'):
|
||||||
|
backend.set_ffi(self)
|
||||||
|
for name in list(backend.__dict__):
|
||||||
|
if name.startswith('RTLD_'):
|
||||||
|
setattr(self, name, getattr(backend, name))
|
||||||
|
#
|
||||||
|
with self._lock:
|
||||||
|
self.BVoidP = self._get_cached_btype(model.voidp_type)
|
||||||
|
self.BCharA = self._get_cached_btype(model.char_array_type)
|
||||||
|
if isinstance(backend, types.ModuleType):
|
||||||
|
# _cffi_backend: attach these constants to the class
|
||||||
|
if not hasattr(FFI, 'NULL'):
|
||||||
|
FFI.NULL = self.cast(self.BVoidP, 0)
|
||||||
|
FFI.CData, FFI.CType = backend._get_types()
|
||||||
|
else:
|
||||||
|
# ctypes backend: attach these constants to the instance
|
||||||
|
self.NULL = self.cast(self.BVoidP, 0)
|
||||||
|
self.CData, self.CType = backend._get_types()
|
||||||
|
self.buffer = backend.buffer
|
||||||
|
|
||||||
|
def cdef(self, csource, override=False, packed=False, pack=None):
|
||||||
|
"""Parse the given C source. This registers all declared functions,
|
||||||
|
types, and global variables. The functions and global variables can
|
||||||
|
then be accessed via either 'ffi.dlopen()' or 'ffi.verify()'.
|
||||||
|
The types can be used in 'ffi.new()' and other functions.
|
||||||
|
If 'packed' is specified as True, all structs declared inside this
|
||||||
|
cdef are packed, i.e. laid out without any field alignment at all.
|
||||||
|
Alternatively, 'pack' can be a small integer, and requests for
|
||||||
|
alignment greater than that are ignored (pack=1 is equivalent to
|
||||||
|
packed=True).
|
||||||
|
"""
|
||||||
|
self._cdef(csource, override=override, packed=packed, pack=pack)
|
||||||
|
|
||||||
|
def embedding_api(self, csource, packed=False, pack=None):
|
||||||
|
self._cdef(csource, packed=packed, pack=pack, dllexport=True)
|
||||||
|
if self._embedding is None:
|
||||||
|
self._embedding = ''
|
||||||
|
|
||||||
|
def _cdef(self, csource, override=False, **options):
|
||||||
|
if not isinstance(csource, str): # unicode, on Python 2
|
||||||
|
if not isinstance(csource, basestring):
|
||||||
|
raise TypeError("cdef() argument must be a string")
|
||||||
|
csource = csource.encode('ascii')
|
||||||
|
with self._lock:
|
||||||
|
self._cdef_version = object()
|
||||||
|
self._parser.parse(csource, override=override, **options)
|
||||||
|
self._cdefsources.append(csource)
|
||||||
|
if override:
|
||||||
|
for cache in self._function_caches:
|
||||||
|
cache.clear()
|
||||||
|
finishlist = self._parser._recomplete
|
||||||
|
if finishlist:
|
||||||
|
self._parser._recomplete = []
|
||||||
|
for tp in finishlist:
|
||||||
|
tp.finish_backend_type(self, finishlist)
|
||||||
|
|
||||||
|
def dlopen(self, name, flags=0):
|
||||||
|
"""Load and return a dynamic library identified by 'name'.
|
||||||
|
The standard C library can be loaded by passing None.
|
||||||
|
Note that functions and types declared by 'ffi.cdef()' are not
|
||||||
|
linked to a particular library, just like C headers; in the
|
||||||
|
library we only look for the actual (untyped) symbols.
|
||||||
|
"""
|
||||||
|
if not (isinstance(name, basestring) or
|
||||||
|
name is None or
|
||||||
|
isinstance(name, self.CData)):
|
||||||
|
raise TypeError("dlopen(name): name must be a file name, None, "
|
||||||
|
"or an already-opened 'void *' handle")
|
||||||
|
with self._lock:
|
||||||
|
lib, function_cache = _make_ffi_library(self, name, flags)
|
||||||
|
self._function_caches.append(function_cache)
|
||||||
|
self._libraries.append(lib)
|
||||||
|
return lib
|
||||||
|
|
||||||
|
def dlclose(self, lib):
|
||||||
|
"""Close a library obtained with ffi.dlopen(). After this call,
|
||||||
|
access to functions or variables from the library will fail
|
||||||
|
(possibly with a segmentation fault).
|
||||||
|
"""
|
||||||
|
type(lib).__cffi_close__(lib)
|
||||||
|
|
||||||
|
def _typeof_locked(self, cdecl):
|
||||||
|
# call me with the lock!
|
||||||
|
key = cdecl
|
||||||
|
if key in self._parsed_types:
|
||||||
|
return self._parsed_types[key]
|
||||||
|
#
|
||||||
|
if not isinstance(cdecl, str): # unicode, on Python 2
|
||||||
|
cdecl = cdecl.encode('ascii')
|
||||||
|
#
|
||||||
|
type = self._parser.parse_type(cdecl)
|
||||||
|
really_a_function_type = type.is_raw_function
|
||||||
|
if really_a_function_type:
|
||||||
|
type = type.as_function_pointer()
|
||||||
|
btype = self._get_cached_btype(type)
|
||||||
|
result = btype, really_a_function_type
|
||||||
|
self._parsed_types[key] = result
|
||||||
|
return result
|
||||||
|
|
||||||
|
def _typeof(self, cdecl, consider_function_as_funcptr=False):
|
||||||
|
# string -> ctype object
|
||||||
|
try:
|
||||||
|
result = self._parsed_types[cdecl]
|
||||||
|
except KeyError:
|
||||||
|
with self._lock:
|
||||||
|
result = self._typeof_locked(cdecl)
|
||||||
|
#
|
||||||
|
btype, really_a_function_type = result
|
||||||
|
if really_a_function_type and not consider_function_as_funcptr:
|
||||||
|
raise CDefError("the type %r is a function type, not a "
|
||||||
|
"pointer-to-function type" % (cdecl,))
|
||||||
|
return btype
|
||||||
|
|
||||||
|
def typeof(self, cdecl):
|
||||||
|
"""Parse the C type given as a string and return the
|
||||||
|
corresponding <ctype> object.
|
||||||
|
It can also be used on 'cdata' instance to get its C type.
|
||||||
|
"""
|
||||||
|
if isinstance(cdecl, basestring):
|
||||||
|
return self._typeof(cdecl)
|
||||||
|
if isinstance(cdecl, self.CData):
|
||||||
|
return self._backend.typeof(cdecl)
|
||||||
|
if isinstance(cdecl, types.BuiltinFunctionType):
|
||||||
|
res = _builtin_function_type(cdecl)
|
||||||
|
if res is not None:
|
||||||
|
return res
|
||||||
|
if (isinstance(cdecl, types.FunctionType)
|
||||||
|
and hasattr(cdecl, '_cffi_base_type')):
|
||||||
|
with self._lock:
|
||||||
|
return self._get_cached_btype(cdecl._cffi_base_type)
|
||||||
|
raise TypeError(type(cdecl))
|
||||||
|
|
||||||
|
def sizeof(self, cdecl):
|
||||||
|
"""Return the size in bytes of the argument. It can be a
|
||||||
|
string naming a C type, or a 'cdata' instance.
|
||||||
|
"""
|
||||||
|
if isinstance(cdecl, basestring):
|
||||||
|
BType = self._typeof(cdecl)
|
||||||
|
return self._backend.sizeof(BType)
|
||||||
|
else:
|
||||||
|
return self._backend.sizeof(cdecl)
|
||||||
|
|
||||||
|
def alignof(self, cdecl):
|
||||||
|
"""Return the natural alignment size in bytes of the C type
|
||||||
|
given as a string.
|
||||||
|
"""
|
||||||
|
if isinstance(cdecl, basestring):
|
||||||
|
cdecl = self._typeof(cdecl)
|
||||||
|
return self._backend.alignof(cdecl)
|
||||||
|
|
||||||
|
def offsetof(self, cdecl, *fields_or_indexes):
|
||||||
|
"""Return the offset of the named field inside the given
|
||||||
|
structure or array, which must be given as a C type name.
|
||||||
|
You can give several field names in case of nested structures.
|
||||||
|
You can also give numeric values which correspond to array
|
||||||
|
items, in case of an array type.
|
||||||
|
"""
|
||||||
|
if isinstance(cdecl, basestring):
|
||||||
|
cdecl = self._typeof(cdecl)
|
||||||
|
return self._typeoffsetof(cdecl, *fields_or_indexes)[1]
|
||||||
|
|
||||||
|
def new(self, cdecl, init=None):
|
||||||
|
"""Allocate an instance according to the specified C type and
|
||||||
|
return a pointer to it. The specified C type must be either a
|
||||||
|
pointer or an array: ``new('X *')`` allocates an X and returns
|
||||||
|
a pointer to it, whereas ``new('X[n]')`` allocates an array of
|
||||||
|
n X'es and returns an array referencing it (which works
|
||||||
|
mostly like a pointer, like in C). You can also use
|
||||||
|
``new('X[]', n)`` to allocate an array of a non-constant
|
||||||
|
length n.
|
||||||
|
|
||||||
|
The memory is initialized following the rules of declaring a
|
||||||
|
global variable in C: by default it is zero-initialized, but
|
||||||
|
an explicit initializer can be given which can be used to
|
||||||
|
fill all or part of the memory.
|
||||||
|
|
||||||
|
When the returned <cdata> object goes out of scope, the memory
|
||||||
|
is freed. In other words the returned <cdata> object has
|
||||||
|
ownership of the value of type 'cdecl' that it points to. This
|
||||||
|
means that the raw data can be used as long as this object is
|
||||||
|
kept alive, but must not be used for a longer time. Be careful
|
||||||
|
about that when copying the pointer to the memory somewhere
|
||||||
|
else, e.g. into another structure.
|
||||||
|
"""
|
||||||
|
if isinstance(cdecl, basestring):
|
||||||
|
cdecl = self._typeof(cdecl)
|
||||||
|
return self._backend.newp(cdecl, init)
|
||||||
|
|
||||||
|
def new_allocator(self, alloc=None, free=None,
|
||||||
|
should_clear_after_alloc=True):
|
||||||
|
"""Return a new allocator, i.e. a function that behaves like ffi.new()
|
||||||
|
but uses the provided low-level 'alloc' and 'free' functions.
|
||||||
|
|
||||||
|
'alloc' is called with the size as argument. If it returns NULL, a
|
||||||
|
MemoryError is raised. 'free' is called with the result of 'alloc'
|
||||||
|
as argument. Both can be either Python function or directly C
|
||||||
|
functions. If 'free' is None, then no free function is called.
|
||||||
|
If both 'alloc' and 'free' are None, the default is used.
|
||||||
|
|
||||||
|
If 'should_clear_after_alloc' is set to False, then the memory
|
||||||
|
returned by 'alloc' is assumed to be already cleared (or you are
|
||||||
|
fine with garbage); otherwise CFFI will clear it.
|
||||||
|
"""
|
||||||
|
compiled_ffi = self._backend.FFI()
|
||||||
|
allocator = compiled_ffi.new_allocator(alloc, free,
|
||||||
|
should_clear_after_alloc)
|
||||||
|
def allocate(cdecl, init=None):
|
||||||
|
if isinstance(cdecl, basestring):
|
||||||
|
cdecl = self._typeof(cdecl)
|
||||||
|
return allocator(cdecl, init)
|
||||||
|
return allocate
|
||||||
|
|
||||||
|
def cast(self, cdecl, source):
|
||||||
|
"""Similar to a C cast: returns an instance of the named C
|
||||||
|
type initialized with the given 'source'. The source is
|
||||||
|
casted between integers or pointers of any type.
|
||||||
|
"""
|
||||||
|
if isinstance(cdecl, basestring):
|
||||||
|
cdecl = self._typeof(cdecl)
|
||||||
|
return self._backend.cast(cdecl, source)
|
||||||
|
|
||||||
|
def string(self, cdata, maxlen=-1):
|
||||||
|
"""Return a Python string (or unicode string) from the 'cdata'.
|
||||||
|
If 'cdata' is a pointer or array of characters or bytes, returns
|
||||||
|
the null-terminated string. The returned string extends until
|
||||||
|
the first null character, or at most 'maxlen' characters. If
|
||||||
|
'cdata' is an array then 'maxlen' defaults to its length.
|
||||||
|
|
||||||
|
If 'cdata' is a pointer or array of wchar_t, returns a unicode
|
||||||
|
string following the same rules.
|
||||||
|
|
||||||
|
If 'cdata' is a single character or byte or a wchar_t, returns
|
||||||
|
it as a string or unicode string.
|
||||||
|
|
||||||
|
If 'cdata' is an enum, returns the value of the enumerator as a
|
||||||
|
string, or 'NUMBER' if the value is out of range.
|
||||||
|
"""
|
||||||
|
return self._backend.string(cdata, maxlen)
|
||||||
|
|
||||||
|
def unpack(self, cdata, length):
|
||||||
|
"""Unpack an array of C data of the given length,
|
||||||
|
returning a Python string/unicode/list.
|
||||||
|
|
||||||
|
If 'cdata' is a pointer to 'char', returns a byte string.
|
||||||
|
It does not stop at the first null. This is equivalent to:
|
||||||
|
ffi.buffer(cdata, length)[:]
|
||||||
|
|
||||||
|
If 'cdata' is a pointer to 'wchar_t', returns a unicode string.
|
||||||
|
'length' is measured in wchar_t's; it is not the size in bytes.
|
||||||
|
|
||||||
|
If 'cdata' is a pointer to anything else, returns a list of
|
||||||
|
'length' items. This is a faster equivalent to:
|
||||||
|
[cdata[i] for i in range(length)]
|
||||||
|
"""
|
||||||
|
return self._backend.unpack(cdata, length)
|
||||||
|
|
||||||
|
#def buffer(self, cdata, size=-1):
|
||||||
|
# """Return a read-write buffer object that references the raw C data
|
||||||
|
# pointed to by the given 'cdata'. The 'cdata' must be a pointer or
|
||||||
|
# an array. Can be passed to functions expecting a buffer, or directly
|
||||||
|
# manipulated with:
|
||||||
|
#
|
||||||
|
# buf[:] get a copy of it in a regular string, or
|
||||||
|
# buf[idx] as a single character
|
||||||
|
# buf[:] = ...
|
||||||
|
# buf[idx] = ... change the content
|
||||||
|
# """
|
||||||
|
# note that 'buffer' is a type, set on this instance by __init__
|
||||||
|
|
||||||
|
def from_buffer(self, cdecl, python_buffer=_unspecified,
|
||||||
|
require_writable=False):
|
||||||
|
"""Return a cdata of the given type pointing to the data of the
|
||||||
|
given Python object, which must support the buffer interface.
|
||||||
|
Note that this is not meant to be used on the built-in types
|
||||||
|
str or unicode (you can build 'char[]' arrays explicitly)
|
||||||
|
but only on objects containing large quantities of raw data
|
||||||
|
in some other format, like 'array.array' or numpy arrays.
|
||||||
|
|
||||||
|
The first argument is optional and default to 'char[]'.
|
||||||
|
"""
|
||||||
|
if python_buffer is _unspecified:
|
||||||
|
cdecl, python_buffer = self.BCharA, cdecl
|
||||||
|
elif isinstance(cdecl, basestring):
|
||||||
|
cdecl = self._typeof(cdecl)
|
||||||
|
return self._backend.from_buffer(cdecl, python_buffer,
|
||||||
|
require_writable)
|
||||||
|
|
||||||
|
def memmove(self, dest, src, n):
|
||||||
|
"""ffi.memmove(dest, src, n) copies n bytes of memory from src to dest.
|
||||||
|
|
||||||
|
Like the C function memmove(), the memory areas may overlap;
|
||||||
|
apart from that it behaves like the C function memcpy().
|
||||||
|
|
||||||
|
'src' can be any cdata ptr or array, or any Python buffer object.
|
||||||
|
'dest' can be any cdata ptr or array, or a writable Python buffer
|
||||||
|
object. The size to copy, 'n', is always measured in bytes.
|
||||||
|
|
||||||
|
Unlike other methods, this one supports all Python buffer including
|
||||||
|
byte strings and bytearrays---but it still does not support
|
||||||
|
non-contiguous buffers.
|
||||||
|
"""
|
||||||
|
return self._backend.memmove(dest, src, n)
|
||||||
|
|
||||||
|
def callback(self, cdecl, python_callable=None, error=None, onerror=None):
|
||||||
|
"""Return a callback object or a decorator making such a
|
||||||
|
callback object. 'cdecl' must name a C function pointer type.
|
||||||
|
The callback invokes the specified 'python_callable' (which may
|
||||||
|
be provided either directly or via a decorator). Important: the
|
||||||
|
callback object must be manually kept alive for as long as the
|
||||||
|
callback may be invoked from the C level.
|
||||||
|
"""
|
||||||
|
def callback_decorator_wrap(python_callable):
|
||||||
|
if not callable(python_callable):
|
||||||
|
raise TypeError("the 'python_callable' argument "
|
||||||
|
"is not callable")
|
||||||
|
return self._backend.callback(cdecl, python_callable,
|
||||||
|
error, onerror)
|
||||||
|
if isinstance(cdecl, basestring):
|
||||||
|
cdecl = self._typeof(cdecl, consider_function_as_funcptr=True)
|
||||||
|
if python_callable is None:
|
||||||
|
return callback_decorator_wrap # decorator mode
|
||||||
|
else:
|
||||||
|
return callback_decorator_wrap(python_callable) # direct mode
|
||||||
|
|
||||||
|
def getctype(self, cdecl, replace_with=''):
|
||||||
|
"""Return a string giving the C type 'cdecl', which may be itself
|
||||||
|
a string or a <ctype> object. If 'replace_with' is given, it gives
|
||||||
|
extra text to append (or insert for more complicated C types), like
|
||||||
|
a variable name, or '*' to get actually the C type 'pointer-to-cdecl'.
|
||||||
|
"""
|
||||||
|
if isinstance(cdecl, basestring):
|
||||||
|
cdecl = self._typeof(cdecl)
|
||||||
|
replace_with = replace_with.strip()
|
||||||
|
if (replace_with.startswith('*')
|
||||||
|
and '&[' in self._backend.getcname(cdecl, '&')):
|
||||||
|
replace_with = '(%s)' % replace_with
|
||||||
|
elif replace_with and not replace_with[0] in '[(':
|
||||||
|
replace_with = ' ' + replace_with
|
||||||
|
return self._backend.getcname(cdecl, replace_with)
|
||||||
|
|
||||||
|
def gc(self, cdata, destructor, size=0):
|
||||||
|
"""Return a new cdata object that points to the same
|
||||||
|
data. Later, when this new cdata object is garbage-collected,
|
||||||
|
'destructor(old_cdata_object)' will be called.
|
||||||
|
|
||||||
|
The optional 'size' gives an estimate of the size, used to
|
||||||
|
trigger the garbage collection more eagerly. So far only used
|
||||||
|
on PyPy. It tells the GC that the returned object keeps alive
|
||||||
|
roughly 'size' bytes of external memory.
|
||||||
|
"""
|
||||||
|
return self._backend.gcp(cdata, destructor, size)
|
||||||
|
|
||||||
|
def _get_cached_btype(self, type):
|
||||||
|
assert self._lock.acquire(False) is False
|
||||||
|
# call me with the lock!
|
||||||
|
try:
|
||||||
|
BType = self._cached_btypes[type]
|
||||||
|
except KeyError:
|
||||||
|
finishlist = []
|
||||||
|
BType = type.get_cached_btype(self, finishlist)
|
||||||
|
for type in finishlist:
|
||||||
|
type.finish_backend_type(self, finishlist)
|
||||||
|
return BType
|
||||||
|
|
||||||
|
def verify(self, source='', tmpdir=None, **kwargs):
|
||||||
|
"""Verify that the current ffi signatures compile on this
|
||||||
|
machine, and return a dynamic library object. The dynamic
|
||||||
|
library can be used to call functions and access global
|
||||||
|
variables declared in this 'ffi'. The library is compiled
|
||||||
|
by the C compiler: it gives you C-level API compatibility
|
||||||
|
(including calling macros). This is unlike 'ffi.dlopen()',
|
||||||
|
which requires binary compatibility in the signatures.
|
||||||
|
"""
|
||||||
|
from .verifier import Verifier, _caller_dir_pycache
|
||||||
|
#
|
||||||
|
# If set_unicode(True) was called, insert the UNICODE and
|
||||||
|
# _UNICODE macro declarations
|
||||||
|
if self._windows_unicode:
|
||||||
|
self._apply_windows_unicode(kwargs)
|
||||||
|
#
|
||||||
|
# Set the tmpdir here, and not in Verifier.__init__: it picks
|
||||||
|
# up the caller's directory, which we want to be the caller of
|
||||||
|
# ffi.verify(), as opposed to the caller of Veritier().
|
||||||
|
tmpdir = tmpdir or _caller_dir_pycache()
|
||||||
|
#
|
||||||
|
# Make a Verifier() and use it to load the library.
|
||||||
|
self.verifier = Verifier(self, source, tmpdir, **kwargs)
|
||||||
|
lib = self.verifier.load_library()
|
||||||
|
#
|
||||||
|
# Save the loaded library for keep-alive purposes, even
|
||||||
|
# if the caller doesn't keep it alive itself (it should).
|
||||||
|
self._libraries.append(lib)
|
||||||
|
return lib
|
||||||
|
|
||||||
|
def _get_errno(self):
|
||||||
|
return self._backend.get_errno()
|
||||||
|
def _set_errno(self, errno):
|
||||||
|
self._backend.set_errno(errno)
|
||||||
|
errno = property(_get_errno, _set_errno, None,
|
||||||
|
"the value of 'errno' from/to the C calls")
|
||||||
|
|
||||||
|
def getwinerror(self, code=-1):
|
||||||
|
return self._backend.getwinerror(code)
|
||||||
|
|
||||||
|
def _pointer_to(self, ctype):
|
||||||
|
with self._lock:
|
||||||
|
return model.pointer_cache(self, ctype)
|
||||||
|
|
||||||
|
def addressof(self, cdata, *fields_or_indexes):
|
||||||
|
"""Return the address of a <cdata 'struct-or-union'>.
|
||||||
|
If 'fields_or_indexes' are given, returns the address of that
|
||||||
|
field or array item in the structure or array, recursively in
|
||||||
|
case of nested structures.
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
ctype = self._backend.typeof(cdata)
|
||||||
|
except TypeError:
|
||||||
|
if '__addressof__' in type(cdata).__dict__:
|
||||||
|
return type(cdata).__addressof__(cdata, *fields_or_indexes)
|
||||||
|
raise
|
||||||
|
if fields_or_indexes:
|
||||||
|
ctype, offset = self._typeoffsetof(ctype, *fields_or_indexes)
|
||||||
|
else:
|
||||||
|
if ctype.kind == "pointer":
|
||||||
|
raise TypeError("addressof(pointer)")
|
||||||
|
offset = 0
|
||||||
|
ctypeptr = self._pointer_to(ctype)
|
||||||
|
return self._backend.rawaddressof(ctypeptr, cdata, offset)
|
||||||
|
|
||||||
|
def _typeoffsetof(self, ctype, field_or_index, *fields_or_indexes):
|
||||||
|
ctype, offset = self._backend.typeoffsetof(ctype, field_or_index)
|
||||||
|
for field1 in fields_or_indexes:
|
||||||
|
ctype, offset1 = self._backend.typeoffsetof(ctype, field1, 1)
|
||||||
|
offset += offset1
|
||||||
|
return ctype, offset
|
||||||
|
|
||||||
|
def include(self, ffi_to_include):
|
||||||
|
"""Includes the typedefs, structs, unions and enums defined
|
||||||
|
in another FFI instance. Usage is similar to a #include in C,
|
||||||
|
where a part of the program might include types defined in
|
||||||
|
another part for its own usage. Note that the include()
|
||||||
|
method has no effect on functions, constants and global
|
||||||
|
variables, which must anyway be accessed directly from the
|
||||||
|
lib object returned by the original FFI instance.
|
||||||
|
"""
|
||||||
|
if not isinstance(ffi_to_include, FFI):
|
||||||
|
raise TypeError("ffi.include() expects an argument that is also of"
|
||||||
|
" type cffi.FFI, not %r" % (
|
||||||
|
type(ffi_to_include).__name__,))
|
||||||
|
if ffi_to_include is self:
|
||||||
|
raise ValueError("self.include(self)")
|
||||||
|
with ffi_to_include._lock:
|
||||||
|
with self._lock:
|
||||||
|
self._parser.include(ffi_to_include._parser)
|
||||||
|
self._cdefsources.append('[')
|
||||||
|
self._cdefsources.extend(ffi_to_include._cdefsources)
|
||||||
|
self._cdefsources.append(']')
|
||||||
|
self._included_ffis.append(ffi_to_include)
|
||||||
|
|
||||||
|
def new_handle(self, x):
|
||||||
|
return self._backend.newp_handle(self.BVoidP, x)
|
||||||
|
|
||||||
|
def from_handle(self, x):
|
||||||
|
return self._backend.from_handle(x)
|
||||||
|
|
||||||
|
def release(self, x):
|
||||||
|
self._backend.release(x)
|
||||||
|
|
||||||
|
def set_unicode(self, enabled_flag):
|
||||||
|
"""Windows: if 'enabled_flag' is True, enable the UNICODE and
|
||||||
|
_UNICODE defines in C, and declare the types like TCHAR and LPTCSTR
|
||||||
|
to be (pointers to) wchar_t. If 'enabled_flag' is False,
|
||||||
|
declare these types to be (pointers to) plain 8-bit characters.
|
||||||
|
This is mostly for backward compatibility; you usually want True.
|
||||||
|
"""
|
||||||
|
if self._windows_unicode is not None:
|
||||||
|
raise ValueError("set_unicode() can only be called once")
|
||||||
|
enabled_flag = bool(enabled_flag)
|
||||||
|
if enabled_flag:
|
||||||
|
self.cdef("typedef wchar_t TBYTE;"
|
||||||
|
"typedef wchar_t TCHAR;"
|
||||||
|
"typedef const wchar_t *LPCTSTR;"
|
||||||
|
"typedef const wchar_t *PCTSTR;"
|
||||||
|
"typedef wchar_t *LPTSTR;"
|
||||||
|
"typedef wchar_t *PTSTR;"
|
||||||
|
"typedef TBYTE *PTBYTE;"
|
||||||
|
"typedef TCHAR *PTCHAR;")
|
||||||
|
else:
|
||||||
|
self.cdef("typedef char TBYTE;"
|
||||||
|
"typedef char TCHAR;"
|
||||||
|
"typedef const char *LPCTSTR;"
|
||||||
|
"typedef const char *PCTSTR;"
|
||||||
|
"typedef char *LPTSTR;"
|
||||||
|
"typedef char *PTSTR;"
|
||||||
|
"typedef TBYTE *PTBYTE;"
|
||||||
|
"typedef TCHAR *PTCHAR;")
|
||||||
|
self._windows_unicode = enabled_flag
|
||||||
|
|
||||||
|
def _apply_windows_unicode(self, kwds):
|
||||||
|
defmacros = kwds.get('define_macros', ())
|
||||||
|
if not isinstance(defmacros, (list, tuple)):
|
||||||
|
raise TypeError("'define_macros' must be a list or tuple")
|
||||||
|
defmacros = list(defmacros) + [('UNICODE', '1'),
|
||||||
|
('_UNICODE', '1')]
|
||||||
|
kwds['define_macros'] = defmacros
|
||||||
|
|
||||||
|
def _apply_embedding_fix(self, kwds):
    """Add the link-time arguments needed to embed the Python/PyPy
    runtime into the produced library (e.g. the equivalent of
    "-lpython2.7"), by mutating the compiler keyword dict 'kwds'.
    """
    # must include an argument like "-lpython2.7" for the compiler
    def ensure(key, value):
        # Append 'value' to kwds[key] only once.
        lst = kwds.setdefault(key, [])
        if value not in lst:
            lst.append(value)
    #
    if '__pypy__' in sys.builtin_module_names:
        import os
        if sys.platform == "win32":
            # we need 'libpypy-c.lib'.  Current distributions of
            # pypy (>= 4.1) contain it as 'libs/python27.lib'.
            pythonlib = "python{0[0]}{0[1]}".format(sys.version_info)
            if hasattr(sys, 'prefix'):
                ensure('library_dirs', os.path.join(sys.prefix, 'libs'))
        else:
            # we need 'libpypy-c.{so,dylib}', which should be by
            # default located in 'sys.prefix/bin' for installed
            # systems.
            if sys.version_info < (3,):
                pythonlib = "pypy-c"
            else:
                pythonlib = "pypy3-c"
            if hasattr(sys, 'prefix'):
                ensure('library_dirs', os.path.join(sys.prefix, 'bin'))
        # On uninstalled pypy's, the libpypy-c is typically found in
        # .../pypy/goal/.
        if hasattr(sys, 'prefix'):
            ensure('library_dirs', os.path.join(sys.prefix, 'pypy', 'goal'))
    else:
        if sys.platform == "win32":
            template = "python%d%d"
            if hasattr(sys, 'gettotalrefcount'):
                # gettotalrefcount only exists on debug builds of CPython.
                template += '_d'
        else:
            try:
                import sysconfig
            except ImportError:    # 2.6
                from distutils import sysconfig
            template = "python%d.%d"
            if sysconfig.get_config_var('DEBUG_EXT'):
                template += sysconfig.get_config_var('DEBUG_EXT')
        # Major/minor extracted from sys.hexversion (0xMMmm....).
        pythonlib = (template %
                     (sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff))
        if hasattr(sys, 'abiflags'):
            pythonlib += sys.abiflags
    ensure('libraries', pythonlib)
    if sys.platform == "win32":
        ensure('extra_link_args', '/MANIFEST')
||||||
|
def set_source(self, module_name, source, source_extension='.c', **kwds):
    """Record the C 'source' and the dotted 'module_name' to use for
    the next compilation step.  May only be called once per ffi object.
    Extra keyword arguments are forwarded to the compiler step.
    """
    import os
    if hasattr(self, '_assigned_source'):
        raise ValueError("set_source() cannot be called several times "
                         "per ffi object")
    if not isinstance(module_name, basestring):
        raise TypeError("'module_name' must be a string")
    # Reject path separators: callers must give a dotted module name.
    has_sep = os.sep in module_name or (os.altsep and
                                        os.altsep in module_name)
    if has_sep:
        raise ValueError("'module_name' must not contain '/': use a dotted "
                         "name to make a 'package.module' location")
    self._assigned_source = (str(module_name), source,
                             source_extension, kwds)
||||||
|
def set_source_pkgconfig(self, module_name, pkgconfig_libs, source,
                         source_extension='.c', **kwds):
    """Like set_source(), but additionally merges into the compiler
    keywords the flags reported by pkg-config for the listed packages.
    """
    from . import pkgconfig
    if not isinstance(pkgconfig_libs, list):
        raise TypeError("the pkgconfig_libs argument must be a list "
                        "of package names")
    # Flags from pkg-config are merged into (not overwriting) 'kwds'.
    extra_flags = pkgconfig.flags_from_pkgconfig(pkgconfig_libs)
    pkgconfig.merge_flags(kwds, extra_flags)
    self.set_source(module_name, source, source_extension, **kwds)
||||||
|
def distutils_extension(self, tmpdir='build', verbose=True):
    """Generate the C source (if out of date) under 'tmpdir' and return
    a distutils Extension object describing it, without compiling.
    Only valid after set_source() with a non-None source.
    """
    from distutils.dir_util import mkpath
    from .recompiler import recompile
    #
    if not hasattr(self, '_assigned_source'):
        if hasattr(self, 'verifier'):     # fallback, 'tmpdir' ignored
            return self.verifier.get_extension()
        raise ValueError("set_source() must be called before"
                         " distutils_extension()")
    module_name, source, source_extension, kwds = self._assigned_source
    if source is None:
        raise TypeError("distutils_extension() is only for C extension "
                        "modules, not for dlopen()-style pure Python "
                        "modules")
    mkpath(tmpdir)
    # call_c_compiler=False: only (re)write the C source, don't build it.
    ext, updated = recompile(self, module_name,
                             source, tmpdir=tmpdir, extradir=tmpdir,
                             source_extension=source_extension,
                             call_c_compiler=False, **kwds)
    if verbose:
        if updated:
            sys.stderr.write("regenerated: %r\n" % (ext.sources[0],))
        else:
            sys.stderr.write("not modified: %r\n" % (ext.sources[0],))
    return ext
||||||
|
def emit_c_code(self, filename):
    """Write to 'filename' the C source for this ffi, without
    invoking any compiler.  Only valid for API-mode ffi objects
    (set_source() called with a non-None source).
    """
    from .recompiler import recompile
    #
    if not hasattr(self, '_assigned_source'):
        raise ValueError("set_source() must be called before emit_c_code()")
    module_name, source, source_extension, kwds = self._assigned_source
    if source is None:
        raise TypeError("emit_c_code() is only for C extension modules, "
                        "not for dlopen()-style pure Python modules")
    recompile(self, module_name, source,
              c_file=filename, call_c_compiler=False, **kwds)
||||||
|
def emit_python_code(self, filename):
    """Write to 'filename' the pure-Python module for this ffi,
    without invoking any compiler.  Only valid for dlopen()-style
    (ABI-mode) ffi objects, i.e. set_source() called with source=None.
    """
    from .recompiler import recompile
    #
    if not hasattr(self, '_assigned_source'):
        # Bug fix: the message used to say "emit_c_code()" (copy-paste
        # from the sibling method above); name the actual entry point.
        raise ValueError("set_source() must be called before "
                         "emit_python_code()")
    module_name, source, source_extension, kwds = self._assigned_source
    if source is not None:
        raise TypeError("emit_python_code() is only for dlopen()-style "
                        "pure Python modules, not for C extension modules")
    recompile(self, module_name, source,
              c_file=filename, call_c_compiler=False, **kwds)
||||||
|
def compile(self, tmpdir='.', verbose=0, target=None, debug=None):
    """The 'target' argument gives the final file name of the
    compiled DLL.  Use '*' to force distutils' choice, suitable for
    regular CPython C API modules.  Use a file name ending in '.*'
    to ask for the system's default extension for dynamic libraries
    (.so/.dll/.dylib).

    The default is '*' when building a non-embedded C API extension,
    and (module_name + '.*') when building an embedded library.
    """
    from .recompiler import recompile
    #
    if not hasattr(self, '_assigned_source'):
        raise ValueError("set_source() must be called before compile()")
    module_name, source, source_extension, kwds = self._assigned_source
    # recompile() both regenerates the source and runs the C compiler;
    # it returns the path of the produced library/module.
    return recompile(self, module_name, source, tmpdir=tmpdir,
                     target=target, source_extension=source_extension,
                     compiler_verbose=verbose, debug=debug, **kwds)
||||||
|
def init_once(self, func, tag):
    """Call 'func()' at most once per 'tag', thread-safely, and
    return (and cache) its result for all later calls with the
    same tag.
    """
    # Read _init_once_cache[tag], which is either (False, lock) if
    # we're calling the function now in some thread, or (True, result).
    # Don't call setdefault() in most cases, to avoid allocating and
    # immediately freeing a lock; but still use setdefault() to avoid
    # races.
    try:
        x = self._init_once_cache[tag]
    except KeyError:
        x = self._init_once_cache.setdefault(tag, (False, allocate_lock()))
    # Common case: we got (True, result), so we return the result.
    if x[0]:
        return x[1]
    # Else, it's a lock.  Acquire it to serialize the following tests.
    with x[1]:
        # Read again from _init_once_cache the current status
        # (another thread may have finished the call meanwhile).
        x = self._init_once_cache[tag]
        if x[0]:
            return x[1]
        # Call the function and store the result back.
        result = func()
        self._init_once_cache[tag] = (True, result)
        return result
||||||
|
def embedding_init_code(self, pysource):
    """Set 'pysource' as the Python initialization code of the
    embedded module.  May only be called once; the source is
    normalized (leading blank lines removed, common indentation
    stripped) and syntax-checked before being stored.
    """
    if self._embedding:
        raise ValueError("embedding_init_code() can only be called once")
    # fix 'pysource' before it gets dumped into the C file:
    # - remove empty lines at the beginning, so it starts at "line 1"
    # - dedent, if all non-empty lines are indented
    # - check for SyntaxErrors
    import re
    match = re.match(r'\s*\n', pysource)
    if match:
        pysource = pysource[match.end():]
    lines = pysource.splitlines() or ['']
    # Start from the first line's indentation and shrink it until it is
    # a common prefix of every non-blank line.
    prefix = re.match(r'\s*', lines[0]).group()
    for i in range(1, len(lines)):
        line = lines[i]
        if line.rstrip():
            while not line.startswith(prefix):
                prefix = prefix[:-1]
    # NOTE: 'i' is deliberately rebound here to the prefix length and
    # 'line' reused as the comprehension variable.
    i = len(prefix)
    lines = [line[i:]+'\n' for line in lines]
    pysource = ''.join(lines)
    #
    # Raise SyntaxError now rather than at embedding startup.
    compile(pysource, "cffi_init", "exec")
    #
    self._embedding = pysource
||||||
|
def def_extern(self, *args, **kwds):
    """Unsupported on ABI-mode (dlopen-style) FFI objects: extern
    "Python" functions require API mode.  Always raises ValueError.
    """
    raise ValueError("ffi.def_extern() is only available on API-mode FFI "
                     "objects")
||||||
|
def list_types(self):
    """Returns the user type names known to this FFI instance.
    This returns a tuple containing three lists of names:
    (typedef_names, names_of_structs, names_of_unions)
    """
    # Declarations are keyed as '<kind> <name>'; bucket them by kind.
    buckets = {'typedef ': [], 'struct ': [], 'union ': []}
    for key in self._parser._declarations:
        for kind_prefix, bucket in buckets.items():
            if key.startswith(kind_prefix):
                bucket.append(key[len(kind_prefix):])
                break
    return (sorted(buckets['typedef ']),
            sorted(buckets['struct ']),
            sorted(buckets['union ']))
||||||
|
|
||||||
|
def _load_backend_lib(backend, name, flags):
    """Open the shared library 'name' through 'backend' and return the
    backend library object.  'name' may be None (whole process), a path,
    or a bare library name resolved via ctypes.util.find_library().
    """
    import os
    if not isinstance(name, basestring):
        # None (or another non-string): let the backend handle it,
        # except on Windows where load_library(None) fails.
        if sys.platform != "win32" or name is not None:
            return backend.load_library(name, flags)
        name = "c"    # Windows: load_library(None) fails, but this works
                      # on Python 2 (backward compatibility hack only)
    first_error = None
    if '.' in name or '/' in name or os.sep in name:
        # Looks like a path or a full file name: try it directly first.
        try:
            return backend.load_library(name, flags)
        except OSError as e:
            first_error = e
    import ctypes.util
    path = ctypes.util.find_library(name)
    if path is None:
        if name == "c" and sys.platform == "win32" and sys.version_info >= (3,):
            raise OSError("dlopen(None) cannot work on Windows for Python 3 "
                          "(see http://bugs.python.org/issue23606)")
        msg = ("ctypes.util.find_library() did not manage "
               "to locate a library called %r" % (name,))
        if first_error is not None:
            # Keep the direct-load error too; it is often more useful.
            msg = "%s. Additionally, %s" % (first_error, msg)
        raise OSError(msg)
    return backend.load_library(path, flags)
||||||
|
def _make_ffi_library(ffi, libname, flags):
    """Open 'libname' and build the lazy 'lib' object for ABI mode.

    Returns (library, library.__dict__).  Attributes are materialized on
    first access by per-kind accessor closures; 'library' is an instance
    of a freshly created FFILibrary class so that variables can be
    exposed as properties on that class.
    """
    backend = ffi._backend
    backendlib = _load_backend_lib(backend, libname, flags)
    #
    def accessor_function(name):
        # Load the C function and cache it directly on the instance dict.
        key = 'function ' + name
        tp, _ = ffi._parser._declarations[key]
        BType = ffi._get_cached_btype(tp)
        value = backendlib.load_function(BType, name)
        library.__dict__[name] = value
    #
    def accessor_variable(name):
        # Global variables become properties on the class, so that every
        # read/write goes through the backend.
        key = 'variable ' + name
        tp, _ = ffi._parser._declarations[key]
        BType = ffi._get_cached_btype(tp)
        read_variable = backendlib.read_variable
        write_variable = backendlib.write_variable
        setattr(FFILibrary, name, property(
            lambda self: read_variable(BType, name),
            lambda self, value: write_variable(BType, name, value)))
    #
    def addressof_var(name):
        # Return (and cache) a pointer cdata to the global variable.
        try:
            return addr_variables[name]
        except KeyError:
            with ffi._lock:
                if name not in addr_variables:
                    key = 'variable ' + name
                    tp, _ = ffi._parser._declarations[key]
                    BType = ffi._get_cached_btype(tp)
                    if BType.kind != 'array':
                        BType = model.pointer_cache(ffi, BType)
                    p = backendlib.load_function(BType, name)
                    addr_variables[name] = p
            return addr_variables[name]
    #
    def accessor_constant(name):
        raise NotImplementedError("non-integer constant '%s' cannot be "
                                  "accessed from a dlopen() library" % (name,))
    #
    def accessor_int_constant(name):
        library.__dict__[name] = ffi._parser._int_constants[name]
    #
    accessors = {}
    # One-element list so the closures can rebind it; compared with 'is'
    # against ffi._cdef_version to detect new cdef() calls.
    accessors_version = [False]
    addr_variables = {}
    #
    def update_accessors():
        if accessors_version[0] is ffi._cdef_version:
            return
        #
        for key, (tp, _) in ffi._parser._declarations.items():
            if not isinstance(tp, model.EnumType):
                tag, name = key.split(' ', 1)
                if tag == 'function':
                    accessors[name] = accessor_function
                elif tag == 'variable':
                    accessors[name] = accessor_variable
                elif tag == 'constant':
                    accessors[name] = accessor_constant
            else:
                # Each enumerator gets its own closure (tp/i bound as
                # defaults to avoid the late-binding pitfall).
                for i, enumname in enumerate(tp.enumerators):
                    def accessor_enum(name, tp=tp, i=i):
                        tp.check_not_partial()
                        library.__dict__[name] = tp.enumvalues[i]
                    accessors[enumname] = accessor_enum
        for name in ffi._parser._int_constants:
            accessors.setdefault(name, accessor_int_constant)
        accessors_version[0] = ffi._cdef_version
    #
    def make_accessor(name):
        with ffi._lock:
            if name in library.__dict__ or name in FFILibrary.__dict__:
                return    # added by another thread while waiting for the lock
            if name not in accessors:
                update_accessors()
                if name not in accessors:
                    raise AttributeError(name)
            accessors[name](name)
    #
    class FFILibrary(object):
        def __getattr__(self, name):
            # Only called when the attribute is missing: materialize it,
            # then retry the normal lookup.
            make_accessor(name)
            return getattr(self, name)
        def __setattr__(self, name, value):
            try:
                property = getattr(self.__class__, name)
            except AttributeError:
                make_accessor(name)
                setattr(self, name, value)
            else:
                # A class-level property means a C global variable.
                property.__set__(self, value)
        def __dir__(self):
            with ffi._lock:
                update_accessors()
                return accessors.keys()
        def __addressof__(self, name):
            # Instance dict => function/constant; class dict => variable.
            if name in library.__dict__:
                return library.__dict__[name]
            if name in FFILibrary.__dict__:
                return addressof_var(name)
            make_accessor(name)
            if name in library.__dict__:
                return library.__dict__[name]
            if name in FFILibrary.__dict__:
                return addressof_var(name)
            raise AttributeError("cffi library has no function or "
                                 "global variable named '%s'" % (name,))
        def __cffi_close__(self):
            backendlib.close_lib()
            self.__dict__.clear()
    #
    if isinstance(libname, basestring):
        try:
            if not isinstance(libname, str):    # unicode, on Python 2
                libname = libname.encode('utf-8')
            FFILibrary.__name__ = 'FFILibrary_%s' % libname
        except UnicodeError:
            pass
    library = FFILibrary()
    return library, library.__dict__
|
||||||
|
def _builtin_function_type(func):
|
||||||
|
# a hack to make at least ffi.typeof(builtin_function) work,
|
||||||
|
# if the builtin function was obtained by 'vengine_cpy'.
|
||||||
|
import sys
|
||||||
|
try:
|
||||||
|
module = sys.modules[func.__module__]
|
||||||
|
ffi = module._cffi_original_ffi
|
||||||
|
types_of_builtin_funcs = module._cffi_types_of_builtin_funcs
|
||||||
|
tp = types_of_builtin_funcs[func]
|
||||||
|
except (KeyError, AttributeError, TypeError):
|
||||||
|
return None
|
||||||
|
else:
|
||||||
|
with ffi._lock:
|
||||||
|
return ffi._get_cached_btype(tp)
|
File diff suppressed because it is too large
Load Diff
|
@ -0,0 +1,187 @@
|
||||||
|
from .error import VerificationError
|
||||||
|
|
||||||
|
class CffiOp(object):
    """One opcode of the compact type table: an (op, arg) pair that can
    be rendered either as a C expression or as four bytes for the
    pure-Python output.  op is None for a raw C expression in arg.
    """

    def __init__(self, op, arg):
        self.op = op
        self.arg = arg

    def as_c_expr(self):
        """Render as a C '_cffi_opcode_t' expression."""
        if self.op is None:
            assert isinstance(self.arg, str)
            return '(_cffi_opcode_t)(%s)' % (self.arg,)
        return '_CFFI_OP(_CFFI_OP_%s, %s)' % (CLASS_NAME[self.op], self.arg)

    def as_python_bytes(self):
        """Render as four escaped bytes: (arg << 8) | op, big-endian."""
        if self.op is None and self.arg.isdigit():
            value = int(self.arg)     # non-negative: '-' not in self.arg
            if value >= 2**31:
                raise OverflowError("cannot emit %r: limited to 2**31-1"
                                    % (self.arg,))
            return format_four_bytes(value)
        if isinstance(self.arg, str):
            raise VerificationError("cannot emit to Python: %r" % (self.arg,))
        return format_four_bytes((self.arg << 8) | self.op)

    def __str__(self):
        return '(%s %s)' % (CLASS_NAME.get(self.op, self.op), self.arg)
|
||||||
|
def format_four_bytes(num):
    """Return 'num' as four big-endian bytes written as literal
    '\\xNN' escape text (for embedding in generated Python source).
    """
    pieces = []
    for shift in (24, 16, 8, 0):
        pieces.append('\\x%02X' % ((num >> shift) & 0xFF))
    return ''.join(pieces)
|
||||||
|
|
||||||
|
# Opcode numbers for the compact type table.  All odd, so that the low
# bit distinguishes an opcode from an index (see _CFFI_OP in C).
OP_PRIMITIVE       = 1
OP_POINTER         = 3
OP_ARRAY           = 5
OP_OPEN_ARRAY      = 7
OP_STRUCT_UNION    = 9
OP_ENUM            = 11
OP_FUNCTION        = 13
OP_FUNCTION_END    = 15
OP_NOOP            = 17
OP_BITFIELD        = 19
OP_TYPENAME        = 21
OP_CPYTHON_BLTN_V  = 23   # varargs
OP_CPYTHON_BLTN_N  = 25   # noargs
OP_CPYTHON_BLTN_O  = 27   # O  (i.e. a single arg)
OP_CONSTANT        = 29
OP_CONSTANT_INT    = 31
OP_GLOBAL_VAR      = 33
OP_DLOPEN_FUNC     = 35
OP_DLOPEN_CONST    = 37
OP_GLOBAL_VAR_F    = 39
OP_EXTERN_PYTHON   = 41

# Indexes of the primitive C types, as used as the 'arg' of OP_PRIMITIVE.
# Must stay in sync with the C side (_cffi_include.h / parse_c_type.h).
PRIM_VOID          = 0
PRIM_BOOL          = 1
PRIM_CHAR          = 2
PRIM_SCHAR         = 3
PRIM_UCHAR         = 4
PRIM_SHORT         = 5
PRIM_USHORT        = 6
PRIM_INT           = 7
PRIM_UINT          = 8
PRIM_LONG          = 9
PRIM_ULONG         = 10
PRIM_LONGLONG      = 11
PRIM_ULONGLONG     = 12
PRIM_FLOAT         = 13
PRIM_DOUBLE        = 14
PRIM_LONGDOUBLE    = 15

PRIM_WCHAR         = 16
PRIM_INT8          = 17
PRIM_UINT8         = 18
PRIM_INT16         = 19
PRIM_UINT16        = 20
PRIM_INT32         = 21
PRIM_UINT32        = 22
PRIM_INT64         = 23
PRIM_UINT64        = 24
PRIM_INTPTR        = 25
PRIM_UINTPTR       = 26
PRIM_PTRDIFF       = 27
PRIM_SIZE          = 28
PRIM_SSIZE         = 29
PRIM_INT_LEAST8    = 30
PRIM_UINT_LEAST8   = 31
PRIM_INT_LEAST16   = 32
PRIM_UINT_LEAST16  = 33
PRIM_INT_LEAST32   = 34
PRIM_UINT_LEAST32  = 35
PRIM_INT_LEAST64   = 36
PRIM_UINT_LEAST64  = 37
PRIM_INT_FAST8     = 38
PRIM_UINT_FAST8    = 39
PRIM_INT_FAST16    = 40
PRIM_UINT_FAST16   = 41
PRIM_INT_FAST32    = 42
PRIM_UINT_FAST32   = 43
PRIM_INT_FAST64    = 44
PRIM_UINT_FAST64   = 45
PRIM_INTMAX        = 46
PRIM_UINTMAX       = 47
PRIM_FLOATCOMPLEX  = 48
PRIM_DOUBLECOMPLEX = 49
PRIM_CHAR16        = 50
PRIM_CHAR32        = 51

_NUM_PRIM          = 52
# Negative pseudo-indexes: primitives whose exact kind is only known at
# compile time on the C side.
_UNKNOWN_PRIM          = -1
_UNKNOWN_FLOAT_PRIM    = -2
_UNKNOWN_LONG_DOUBLE   = -3

_IO_FILE_STRUCT        = -1

# Maps the C spelling of each primitive type to its PRIM_* index.
PRIMITIVE_TO_INDEX = {
    'char':               PRIM_CHAR,
    'short':              PRIM_SHORT,
    'int':                PRIM_INT,
    'long':               PRIM_LONG,
    'long long':          PRIM_LONGLONG,
    'signed char':        PRIM_SCHAR,
    'unsigned char':      PRIM_UCHAR,
    'unsigned short':     PRIM_USHORT,
    'unsigned int':       PRIM_UINT,
    'unsigned long':      PRIM_ULONG,
    'unsigned long long': PRIM_ULONGLONG,
    'float':              PRIM_FLOAT,
    'double':             PRIM_DOUBLE,
    'long double':        PRIM_LONGDOUBLE,
    'float _Complex':     PRIM_FLOATCOMPLEX,
    'double _Complex':    PRIM_DOUBLECOMPLEX,
    '_Bool':              PRIM_BOOL,
    'wchar_t':            PRIM_WCHAR,
    'char16_t':           PRIM_CHAR16,
    'char32_t':           PRIM_CHAR32,
    'int8_t':             PRIM_INT8,
    'uint8_t':            PRIM_UINT8,
    'int16_t':            PRIM_INT16,
    'uint16_t':           PRIM_UINT16,
    'int32_t':            PRIM_INT32,
    'uint32_t':           PRIM_UINT32,
    'int64_t':            PRIM_INT64,
    'uint64_t':           PRIM_UINT64,
    'intptr_t':           PRIM_INTPTR,
    'uintptr_t':          PRIM_UINTPTR,
    'ptrdiff_t':          PRIM_PTRDIFF,
    'size_t':             PRIM_SIZE,
    'ssize_t':            PRIM_SSIZE,
    'int_least8_t':       PRIM_INT_LEAST8,
    'uint_least8_t':      PRIM_UINT_LEAST8,
    'int_least16_t':      PRIM_INT_LEAST16,
    'uint_least16_t':     PRIM_UINT_LEAST16,
    'int_least32_t':      PRIM_INT_LEAST32,
    'uint_least32_t':     PRIM_UINT_LEAST32,
    'int_least64_t':      PRIM_INT_LEAST64,
    'uint_least64_t':     PRIM_UINT_LEAST64,
    'int_fast8_t':        PRIM_INT_FAST8,
    'uint_fast8_t':       PRIM_UINT_FAST8,
    'int_fast16_t':       PRIM_INT_FAST16,
    'uint_fast16_t':      PRIM_UINT_FAST16,
    'int_fast32_t':       PRIM_INT_FAST32,
    'uint_fast32_t':      PRIM_UINT_FAST32,
    'int_fast64_t':       PRIM_INT_FAST64,
    'uint_fast64_t':      PRIM_UINT_FAST64,
    'intmax_t':           PRIM_INTMAX,
    'uintmax_t':          PRIM_UINTMAX,
    }

# Bit flags describing a struct/union in the generated tables.
F_UNION         = 0x01
F_CHECK_FIELDS  = 0x02
F_PACKED        = 0x04
F_EXTERNAL      = 0x08
F_OPAQUE        = 0x10

# Same flags under their C macro names, for emitting C source.
G_FLAGS = dict([('_CFFI_' + _key, globals()[_key])
                for _key in ['F_UNION', 'F_CHECK_FIELDS', 'F_PACKED',
                             'F_EXTERNAL', 'F_OPAQUE']])

# Reverse map: opcode number -> short name (e.g. 1 -> 'PRIMITIVE').
CLASS_NAME = {}
for _name, _value in list(globals().items()):
    if _name.startswith('OP_') and isinstance(_value, int):
        CLASS_NAME[_value] = _name[3:]
|
|
@ -0,0 +1,80 @@
|
||||||
|
import sys
|
||||||
|
from . import model
|
||||||
|
from .error import FFIError
|
||||||
|
|
||||||
|
|
||||||
|
# Table of "commonly used" C type names that are accepted in cdef()
# without an explicit typedef, mapping name -> replacement cdecl string
# (or a ready-made model type).
COMMON_TYPES = {}

try:
    # fetch "bool" and all simple Windows types
    from _cffi_backend import _get_common_types
    _get_common_types(COMMON_TYPES)
except ImportError:
    pass

# FILE is always known, as an opaque '_IO_FILE' struct.
COMMON_TYPES['FILE'] = model.unknown_type('FILE', '_IO_FILE')
COMMON_TYPES['bool'] = '_Bool'    # in case we got ImportError above

# All '..._t' primitive names map to themselves.
for _type in model.PrimitiveType.ALL_PRIMITIVE_TYPES:
    if _type.endswith('_t'):
        COMMON_TYPES[_type] = _type
del _type

# Memoization cache for resolve_common_type(): name -> (type, quals).
_CACHE = {}
||||||
|
def resolve_common_type(parser, commontype):
    """Resolve the "common type" name 'commontype' to a (model type,
    qualifiers) pair, caching the result in _CACHE.  Raises FFIError
    for unknown names or for Windows TCHAR-family names used before
    ffi.set_unicode().
    """
    try:
        return _CACHE[commontype]
    except KeyError:
        cdecl = COMMON_TYPES.get(commontype, commontype)
        if not isinstance(cdecl, str):
            result, quals = cdecl, 0    # cdecl is already a BaseType
        elif cdecl in model.PrimitiveType.ALL_PRIMITIVE_TYPES:
            result, quals = model.PrimitiveType(cdecl), 0
        elif cdecl == 'set-unicode-needed':
            raise FFIError("The Windows type %r is only available after "
                           "you call ffi.set_unicode()" % (commontype,))
        else:
            if commontype == cdecl:
                # No replacement found: the name is simply unknown.
                raise FFIError(
                    "Unsupported type: %r.  Please look at "
        "http://cffi.readthedocs.io/en/latest/cdef.html#ffi-cdef-limitations "
                    "and file an issue if you think this type should really "
                    "be supported." % (commontype,))
            result, quals = parser.parse_type_and_quals(cdecl)   # recursive

        assert isinstance(result, model.BaseTypeByIdentity)
        _CACHE[commontype] = result, quals
        return result, quals
|
||||||
|
|
||||||
|
|
||||||
|
# ____________________________________________________________
|
||||||
|
# extra types for Windows (most of them are in commontypes.c)
|
||||||
|
|
||||||
|
|
||||||
|
def win_common_types():
    """Return the extra Windows-only common types (most simple ones
    live in commontypes.c in the backend).  The TCHAR family maps to
    the sentinel 'set-unicode-needed' until ffi.set_unicode() defines
    them one way or the other.
    """
    return {
        "UNICODE_STRING": model.StructType(
            "_UNICODE_STRING",
            ["Length",
             "MaximumLength",
             "Buffer"],
            [model.PrimitiveType("unsigned short"),
             model.PrimitiveType("unsigned short"),
             model.PointerType(model.PrimitiveType("wchar_t"))],
            [-1, -1, -1]),
        "PUNICODE_STRING": "UNICODE_STRING *",
        "PCUNICODE_STRING": "const UNICODE_STRING *",

        "TBYTE": "set-unicode-needed",
        "TCHAR": "set-unicode-needed",
        "LPCTSTR": "set-unicode-needed",
        "PCTSTR": "set-unicode-needed",
        "LPTSTR": "set-unicode-needed",
        "PTSTR": "set-unicode-needed",
        "PTBYTE": "set-unicode-needed",
        "PTCHAR": "set-unicode-needed",
        }
|
||||||
|
|
||||||
|
# On Windows, merge the extra Windows-only names into the common table.
if sys.platform == 'win32':
    COMMON_TYPES.update(win_common_types())
|
File diff suppressed because it is too large
Load Diff
|
@ -0,0 +1,31 @@
|
||||||
|
|
||||||
|
class FFIError(Exception):
    """Generic error raised by cffi FFI operations."""
    __module__ = 'cffi'
|
||||||
|
|
||||||
|
class CDefError(Exception):
    """Error while parsing a cdef() declaration.  When the failing
    declaration (with pycparser coordinates) is given as a second
    argument, str() prefixes the message with 'file:line: '.
    """
    __module__ = 'cffi'

    def __str__(self):
        try:
            failing_decl = self.args[1]
            location = failing_decl.coord
            prefix = '%s:%d: ' % (location.file, location.line)
        except (AttributeError, TypeError, IndexError):
            prefix = ''
        return '%s%s' % (prefix, self.args[0])
|
||||||
|
|
||||||
|
class VerificationError(Exception):
    """ An error raised when verification fails
    """
    __module__ = 'cffi'
|
||||||
|
|
||||||
|
class VerificationMissing(Exception):
    """ An error raised when incomplete structures are passed into
    cdef, but no verification has been done
    """
    __module__ = 'cffi'
|
||||||
|
|
||||||
|
class PkgConfigError(Exception):
    """ An error raised for missing modules in pkg-config
    """
    __module__ = 'cffi'
|
|
@ -0,0 +1,127 @@
|
||||||
|
import sys, os
|
||||||
|
from .error import VerificationError
|
||||||
|
|
||||||
|
|
||||||
|
# Extension keyword arguments whose values are lists of file names
# (used when merging/normalizing compiler keyword dicts).
LIST_OF_FILE_NAMES = ['sources', 'include_dirs', 'library_dirs',
                      'extra_objects', 'depends']
|
||||||
|
|
||||||
|
def get_extension(srcfilename, modname, sources=(), **kwds):
    """Build a distutils Extension named 'modname' whose source list is
    'srcfilename' followed by the normalized paths from 'sources'.
    """
    _hack_at_distutils()
    from distutils.core import Extension
    allsources = [srcfilename]
    allsources.extend(os.path.normpath(src) for src in sources)
    return Extension(name=modname, sources=allsources, **kwds)
|
||||||
|
|
||||||
|
def compile(tmpdir, ext, compiler_verbose=0, debug=None):
    """Compile a C extension module using distutils."""

    _hack_at_distutils()
    # Snapshot the environment: distutils is known to mutate it.
    saved_environ = os.environ.copy()
    try:
        outputfilename = _build(tmpdir, ext, compiler_verbose, debug)
        outputfilename = os.path.abspath(outputfilename)
    finally:
        # workaround for a distutils bugs where some env vars can
        # become longer and longer every time it is used
        for key, value in saved_environ.items():
            if os.environ.get(key) != value:
                os.environ[key] = value
    return outputfilename
|
||||||
|
|
||||||
|
def _build(tmpdir, ext, compiler_verbose=0, debug=None):
    """Run distutils' build_ext on the single Extension 'ext', placing
    the build products under 'tmpdir'; return the path of the produced
    shared object.  CompileError/LinkError become VerificationError.
    """
    # XXX compact but horrible :-(
    from distutils.core import Distribution
    import distutils.errors, distutils.log
    #
    dist = Distribution({'ext_modules': [ext]})
    dist.parse_config_files()
    options = dist.get_option_dict('build_ext')
    if debug is None:
        # Default to a debug build only when Python itself runs with -d.
        debug = sys.flags.debug
    options['debug'] = ('ffiplatform', debug)
    options['force'] = ('ffiplatform', True)
    options['build_lib'] = ('ffiplatform', tmpdir)
    options['build_temp'] = ('ffiplatform', tmpdir)
    #
    try:
        old_level = distutils.log.set_threshold(0) or 0
        try:
            distutils.log.set_verbosity(compiler_verbose)
            dist.run_command('build_ext')
            cmd_obj = dist.get_command_obj('build_ext')
            # Exactly one extension was given, so exactly one output.
            [soname] = cmd_obj.get_outputs()
        finally:
            distutils.log.set_threshold(old_level)
    except (distutils.errors.CompileError,
            distutils.errors.LinkError) as e:
        raise VerificationError('%s: %s' % (e.__class__.__name__, e))
    #
    return soname
|
||||||
|
|
||||||
|
# os.path.samefile is missing on some platforms (e.g. old Windows
# Pythons); fall back to comparing absolute paths.
try:
    from os.path import samefile
except ImportError:
    def samefile(f1, f2):
        # Weaker than the real samefile: no inode comparison.
        return os.path.abspath(f1) == os.path.abspath(f2)
|
||||||
|
|
||||||
|
def maybe_relative_path(path):
    """If 'path' is an absolute path below the current directory,
    return the equivalent relative path; otherwise return 'path'
    unchanged.
    """
    if not os.path.isabs(path):
        return path      # already relative
    current = path
    components = []
    while True:
        parent, tail = os.path.split(current)
        if parent == current or not parent:
            return path     # failed to make it relative
        components.append(tail)
        try:
            if samefile(parent, os.curdir):
                components.reverse()
                return os.path.join(*components)
        except OSError:
            pass
        current = parent
|
||||||
|
|
||||||
|
# ____________________________________________________________
|
||||||
|
|
||||||
|
try:
|
||||||
|
int_or_long = (int, long)
|
||||||
|
import cStringIO
|
||||||
|
except NameError:
|
||||||
|
int_or_long = int # Python 3
|
||||||
|
import io as cStringIO
|
||||||
|
|
||||||
|
def _flatten(x, f):
    """Serialize 'x' (str, dict, list/tuple or int) into the file-like
    object 'f' as a canonical, deterministic text form, used to detect
    changes in the keyword arguments passed to verify().
    """
    if isinstance(x, str):
        f.write('%ds%s' % (len(x), x))
    elif isinstance(x, dict):
        # Sort keys so the output does not depend on dict ordering.
        keys = sorted(x.keys())
        f.write('%dd' % len(keys))
        for key in keys:
            _flatten(key, f)
            _flatten(x[key], f)
    elif isinstance(x, (list, tuple)):
        f.write('%dl' % len(x))
        for value in x:
            _flatten(value, f)
    elif isinstance(x, int_or_long):
        f.write('%di' % (x,))
    else:
        raise TypeError(
            "the keywords to verify() contains unsupported object %r" % (x,))
|
||||||
|
|
||||||
|
def flatten(x):
    """Return the canonical string serialization of 'x' (see _flatten)."""
    f = cStringIO.StringIO()
    _flatten(x, f)
    return f.getvalue()
|
||||||
|
|
||||||
|
def _hack_at_distutils():
|
||||||
|
# Windows-only workaround for some configurations: see
|
||||||
|
# https://bugs.python.org/issue23246 (Python 2.7 with
|
||||||
|
# a specific MS compiler suite download)
|
||||||
|
if sys.platform == "win32":
|
||||||
|
try:
|
||||||
|
import setuptools # for side-effects, patches distutils
|
||||||
|
except ImportError:
|
||||||
|
pass
|
|
@ -0,0 +1,30 @@
|
||||||
|
import sys
|
||||||
|
|
||||||
|
# Pick the right allocate_lock for this interpreter: the real threading
# primitive when threads are available, else the single-threaded dummy
# fallback.  Module names differ between Python 3 and Python 2.
if sys.version_info >= (3,):
    try:
        from _thread import allocate_lock
    except ImportError:
        from _dummy_thread import allocate_lock
else:
    try:
        from thread import allocate_lock
    except ImportError:
        from dummy_thread import allocate_lock
|
||||||
|
|
||||||
|
##import sys
|
||||||
|
##l1 = allocate_lock
|
||||||
|
|
||||||
|
##class allocate_lock(object):
|
||||||
|
## def __init__(self):
|
||||||
|
## self._real = l1()
|
||||||
|
## def __enter__(self):
|
||||||
|
## for i in range(4, 0, -1):
|
||||||
|
## print sys._getframe(i).f_code
|
||||||
|
## print
|
||||||
|
## return self._real.__enter__()
|
||||||
|
## def __exit__(self, *args):
|
||||||
|
## return self._real.__exit__(*args)
|
||||||
|
## def acquire(self, f):
|
||||||
|
## assert f is False
|
||||||
|
## return self._real.acquire(f)
|
|
@ -0,0 +1,617 @@
|
||||||
|
import types
|
||||||
|
import weakref
|
||||||
|
|
||||||
|
from .lock import allocate_lock
|
||||||
|
from .error import CDefError, VerificationError, VerificationMissing
|
||||||
|
|
||||||
|
# type qualifiers, combinable as a bitmask
Q_CONST    = 0x01
Q_RESTRICT = 0x02
Q_VOLATILE = 0x04

def qualify(quals, replace_with):
    """Prepend the C qualifier keywords selected by the *quals* bitmask
    to *replace_with*, producing '[__restrict] [volatile] [const] <rest>'.

    Note: __restrict is supported by both gcc and msvc.  If you hit some
    different compiler, add a #define in _cffi_include.h for it (and in
    its copies, documented there).
    """
    for flag, keyword in ((Q_CONST, ' const '),
                          (Q_VOLATILE, ' volatile '),
                          (Q_RESTRICT, ' __restrict ')):
        if quals & flag:
            replace_with = keyword + replace_with.lstrip()
    return replace_with
class BaseTypeByIdentity(object):
    """Base class for model types that compare by object identity.

    Subclasses must set 'c_name_with_marker': the C name of the type
    containing exactly one '&', marking the spot where a variable name
    would be inserted when rendering a declaration.
    """
    is_array_type = False
    is_raw_function = False

    def get_c_name(self, replace_with='', context='a C file', quals=0):
        """Render this type as C source, substituting *replace_with* (e.g.
        a variable name) at the '&' marker, qualified per the Q_* bitmask
        *quals*.  Raises VerificationError if the rendered name still
        contains a '$' placeholder (unknown type name)."""
        result = self.c_name_with_marker
        assert result.count('&') == 1
        # some logic duplication with ffi.getctype()... :-(
        replace_with = replace_with.strip()
        if replace_with:
            if replace_with.startswith('*') and '&[' in result:
                # a '*name' inserted into an array type needs parentheses:
                # 'int(*name)[5]' rather than 'int *name[5]'
                replace_with = '(%s)' % replace_with
            elif replace_with[0] not in '[(':
                replace_with = ' ' + replace_with
        replace_with = qualify(quals, replace_with)
        result = result.replace('&', replace_with)
        if '$' in result:
            raise VerificationError(
                "cannot generate '%s' in %s: unknown type name"
                % (self._get_c_name(), context))
        return result

    def _get_c_name(self):
        # the plain C name, with the '&' insertion marker removed
        return self.c_name_with_marker.replace('&', '')

    def has_c_name(self):
        # '$' marks a made-up placeholder, not a real C identifier
        return '$' not in self._get_c_name()

    def is_integer_type(self):
        return False

    def get_cached_btype(self, ffi, finishlist, can_delay=False):
        """Return the backend type for this model type, building and
        caching it in ffi._cached_btypes on first use."""
        try:
            BType = ffi._cached_btypes[self]
        except KeyError:
            BType = self.build_backend_type(ffi, finishlist)
            BType2 = ffi._cached_btypes.setdefault(self, BType)
            assert BType2 is BType
        return BType

    def __repr__(self):
        return '<%s>' % (self._get_c_name(),)

    def _get_items(self):
        # the (name, value) pairs that identify this type structurally
        return [(name, getattr(self, name)) for name in self._attrs_]
class BaseType(BaseTypeByIdentity):
    """Model type with structural equality over the '_attrs_' fields."""

    def __eq__(self, other):
        if self.__class__ != other.__class__:
            return False
        return self._get_items() == other._get_items()

    def __ne__(self, other):
        return not self == other

    def __hash__(self):
        return hash((self.__class__, tuple(self._get_items())))
class VoidType(BaseType):
    """The C type 'void'."""
    _attrs_ = ()

    def __init__(self):
        self.c_name_with_marker = 'void&'

    def build_backend_type(self, ffi, finishlist):
        return global_cache(self, ffi, 'new_void_type')


# the single shared instance representing plain 'void'
void_type = VoidType()
class BasePrimitiveType(BaseType):
    """Common base for primitive C types, both known and deferred."""

    def is_complex_type(self):
        return False
class PrimitiveType(BasePrimitiveType):
    """A named primitive C type.

    ALL_PRIMITIVE_TYPES maps each recognized name to a category code:
    'c' = character, 'i' = integer, 'f' = floating point, 'j' = complex.
    """
    _attrs_ = ('name',)

    ALL_PRIMITIVE_TYPES = {
        'char':               'c',
        'short':              'i',
        'int':                'i',
        'long':               'i',
        'long long':          'i',
        'signed char':        'i',
        'unsigned char':      'i',
        'unsigned short':     'i',
        'unsigned int':       'i',
        'unsigned long':      'i',
        'unsigned long long': 'i',
        'float':              'f',
        'double':             'f',
        'long double':        'f',
        'float _Complex':     'j',
        'double _Complex':    'j',
        '_Bool':              'i',
        # the following types are not primitive in the C sense
        'wchar_t':            'c',
        'char16_t':           'c',
        'char32_t':           'c',
        'int8_t':             'i',
        'uint8_t':            'i',
        'int16_t':            'i',
        'uint16_t':           'i',
        'int32_t':            'i',
        'uint32_t':           'i',
        'int64_t':            'i',
        'uint64_t':           'i',
        'int_least8_t':       'i',
        'uint_least8_t':      'i',
        'int_least16_t':      'i',
        'uint_least16_t':     'i',
        'int_least32_t':      'i',
        'uint_least32_t':     'i',
        'int_least64_t':      'i',
        'uint_least64_t':     'i',
        'int_fast8_t':        'i',
        'uint_fast8_t':       'i',
        'int_fast16_t':       'i',
        'uint_fast16_t':      'i',
        'int_fast32_t':       'i',
        'uint_fast32_t':      'i',
        'int_fast64_t':       'i',
        'uint_fast64_t':      'i',
        'intptr_t':           'i',
        'uintptr_t':          'i',
        'intmax_t':           'i',
        'uintmax_t':          'i',
        'ptrdiff_t':          'i',
        'size_t':             'i',
        'ssize_t':            'i',
        }

    def __init__(self, name):
        assert name in self.ALL_PRIMITIVE_TYPES
        self.name = name
        self.c_name_with_marker = name + '&'

    def _category(self):
        # category code of this type: 'c', 'i', 'f' or 'j'
        return self.ALL_PRIMITIVE_TYPES[self.name]

    def is_char_type(self):
        return self._category() == 'c'

    def is_integer_type(self):
        return self._category() == 'i'

    def is_float_type(self):
        return self._category() == 'f'

    def is_complex_type(self):
        return self._category() == 'j'

    def build_backend_type(self, ffi, finishlist):
        return global_cache(self, ffi, 'new_primitive_type', self.name)
class UnknownIntegerType(BasePrimitiveType):
    """An integer type whose exact size/signedness is only determined at
    compile time (e.g. declared with 'typedef int... name;')."""
    _attrs_ = ('name',)

    def __init__(self, name):
        self.name = name
        self.c_name_with_marker = name + '&'

    def is_integer_type(self):
        return True

    def build_backend_type(self, ffi, finishlist):
        # the real size is unknown until the C compiler has run
        raise NotImplementedError("integer type '%s' can only be used after "
                                  "compilation" % self.name)
class UnknownFloatType(BasePrimitiveType):
    """A floating-point type whose exact nature is only determined at
    compile time (e.g. declared with 'typedef float... name;')."""
    _attrs_ = ('name', )

    def __init__(self, name):
        self.name = name
        self.c_name_with_marker = name + '&'

    def build_backend_type(self, ffi, finishlist):
        # the real representation is unknown until the C compiler has run
        raise NotImplementedError("float type '%s' can only be used after "
                                  "compilation" % self.name)
class BaseFunctionType(BaseType):
    """Common base for function types.

    'args' holds the argument types, 'result' the return type,
    'ellipsis' whether the function is variadic, and 'abi' an optional
    calling-convention string such as '__stdcall'.
    """
    _attrs_ = ('args', 'result', 'ellipsis', 'abi')

    def __init__(self, args, result, ellipsis, abi=None):
        self.args = args
        self.result = result
        self.ellipsis = ellipsis
        self.abi = abi
        #
        # Build the displayed C name from the subclass's _base_pattern,
        # appending '...' for variadic functions and showing 'void' for
        # an empty argument list.
        arg_names = [arg._get_c_name() for arg in self.args]
        if self.ellipsis:
            arg_names.append('...')
        if not arg_names:
            arg_names = ['void']
        replace_with = self._base_pattern % (', '.join(arg_names),)
        if abi is not None:
            # splice the calling convention just after the opening '('
            replace_with = replace_with[:1] + abi + ' ' + replace_with[1:]
        self.c_name_with_marker = (
            self.result.c_name_with_marker.replace('&', replace_with))
class RawFunctionType(BaseFunctionType):
    """Corresponds to a C type like 'int(int)', which is the C type of
    a function itself, not a pointer-to-function.  The backend has no
    notion of such a type; it is used temporarily by parsing."""
    _base_pattern = '(&)(%s)'
    is_raw_function = True

    def build_backend_type(self, ffi, finishlist):
        raise CDefError("cannot render the type %r: it is a function "
                        "type, not a pointer-to-function type" % (self,))

    def as_function_pointer(self):
        # the corresponding pointer-to-function type
        return FunctionPtrType(self.args, self.result, self.ellipsis, self.abi)
class FunctionPtrType(BaseFunctionType):
    """A pointer-to-function type, e.g. 'int(*)(int)'."""
    _base_pattern = '(*&)(%s)'

    def build_backend_type(self, ffi, finishlist):
        result = self.result.get_cached_btype(ffi, finishlist)
        args = [tp.get_cached_btype(ffi, finishlist) for tp in self.args]
        abi_args = ()
        if self.abi == "__stdcall":
            if not self.ellipsis:    # __stdcall ignored for variadic funcs
                try:
                    abi_args = (ffi._backend.FFI_STDCALL,)
                except AttributeError:
                    # backend without __stdcall support: fall back silently
                    pass
        return global_cache(self, ffi, 'new_function_type',
                            tuple(args), result, self.ellipsis, *abi_args)

    def as_raw_function(self):
        # the corresponding plain (non-pointer) function type
        return RawFunctionType(self.args, self.result, self.ellipsis, self.abi)
class PointerType(BaseType):
    """A pointer type '<totype> *', with optional Q_* qualifiers."""
    _attrs_ = ('totype', 'quals')

    def __init__(self, totype, quals=0):
        self.totype = totype
        self.quals = quals
        extra = qualify(quals, " *&")
        if totype.is_array_type:
            # pointer-to-array needs parentheses: 'int(*)[5]'
            extra = "(%s)" % (extra.lstrip(),)
        self.c_name_with_marker = totype.c_name_with_marker.replace('&', extra)

    def build_backend_type(self, ffi, finishlist):
        # the pointee may be completed later (can_delay=True)
        BItem = self.totype.get_cached_btype(ffi, finishlist, can_delay=True)
        return global_cache(self, ffi, 'new_pointer_type', BItem)


# the shared 'void *' type
voidp_type = PointerType(void_type)
def ConstPointerType(totype):
    """Shortcut: a PointerType to *totype* qualified with 'const'."""
    return PointerType(totype, quals=Q_CONST)


# the shared 'const void *' type
const_voidp_type = ConstPointerType(void_type)
class NamedPointerType(PointerType):
    """A pointer type known under a typedef name; rendered using that
    name instead of the expanded '<totype> *' syntax."""
    _attrs_ = ('totype', 'name')

    def __init__(self, totype, name, quals=0):
        PointerType.__init__(self, totype, quals)
        self.name = name
        # override the computed pointer syntax with the typedef name
        self.c_name_with_marker = name + '&'
class ArrayType(BaseType):
    """A C array type '<item>[<length>]'.

    'length' is None for '[]', the string '...' for a length to be
    completed at compile time, or an integer once resolved.
    """
    _attrs_ = ('item', 'length')
    is_array_type = True

    def __init__(self, item, length):
        self.item = item
        self.length = length
        #
        if length is None:
            brackets = '&[]'
        elif length == '...':
            brackets = '&[/*...*/]'
        else:
            brackets = '&[%s]' % length
        self.c_name_with_marker = (
            self.item.c_name_with_marker.replace('&', brackets))

    def length_is_unknown(self):
        # a length still stored as a string ('...') is not resolved yet
        return isinstance(self.length, str)

    def resolve_length(self, newlength):
        # types are immutable: return a fresh ArrayType with the real length
        return ArrayType(self.item, newlength)

    def build_backend_type(self, ffi, finishlist):
        if self.length_is_unknown():
            raise CDefError("cannot render the type %r: unknown length" %
                            (self,))
        self.item.get_cached_btype(ffi, finishlist)   # force the item BType
        BPtrItem = PointerType(self.item).get_cached_btype(ffi, finishlist)
        return global_cache(self, ffi, 'new_array_type', BPtrItem, self.length)


# the shared 'char[]' type
char_array_type = ArrayType(PrimitiveType('char'), None)
class StructOrUnionOrEnum(BaseTypeByIdentity):
    """Common base for struct, union and enum types: compared by identity
    and displayed under an official C name like 'struct foo'."""
    _attrs_ = ('name',)
    # when set, used instead of the computed '<kind> <name>' display name
    forcename = None

    def build_c_name_with_marker(self):
        name = self.forcename or '%s %s' % (self.kind, self.name)
        self.c_name_with_marker = name + '&'

    def force_the_name(self, forcename):
        # e.g. when a typedef provides a nicer name for this type
        self.forcename = forcename
        self.build_c_name_with_marker()

    def get_official_name(self):
        assert self.c_name_with_marker.endswith('&')
        return self.c_name_with_marker[:-1]
class StructOrUnion(StructOrUnionOrEnum):
    """A struct or union type.

    Field data lives in the parallel tuples 'fldnames', 'fldtypes',
    'fldbitsize' and 'fldquals' (all None for an opaque declaration).
    'completed' tracks backend completion: 0 = not started, 1 = in
    progress, 2 = done.  'fixedlayout', when set, carries offsets and
    sizes measured by the C compiler.
    """
    fixedlayout = None
    completed = 0
    partial = False
    packed = 0

    def __init__(self, name, fldnames, fldtypes, fldbitsize, fldquals=None):
        self.name = name
        self.fldnames = fldnames
        self.fldtypes = fldtypes
        self.fldbitsize = fldbitsize
        self.fldquals = fldquals
        self.build_c_name_with_marker()

    def anonymous_struct_fields(self):
        """Yield the types of anonymous (unnamed) nested struct/union fields."""
        if self.fldtypes is not None:
            for name, type in zip(self.fldnames, self.fldtypes):
                if name == '' and isinstance(type, StructOrUnion):
                    yield type

    def enumfields(self, expand_anonymous_struct_union=True):
        """Yield (name, type, bitsize, quals) for each field, by default
        recursing into anonymous nested structs/unions."""
        fldquals = self.fldquals
        if fldquals is None:
            fldquals = (0,) * len(self.fldnames)
        for name, type, bitsize, quals in zip(self.fldnames, self.fldtypes,
                                              self.fldbitsize, fldquals):
            if (name == '' and isinstance(type, StructOrUnion)
                    and expand_anonymous_struct_union):
                # nested anonymous struct/union: flatten its fields
                for result in type.enumfields():
                    yield result
            else:
                yield (name, type, bitsize, quals)

    def force_flatten(self):
        """Force the struct or union to have a declaration that directly
        lists all fields returned by enumfields(), flattening nested
        anonymous structs/unions."""
        flattened = list(self.enumfields())
        self.fldnames = tuple(f[0] for f in flattened)
        self.fldtypes = tuple(f[1] for f in flattened)
        self.fldbitsize = tuple(f[2] for f in flattened)
        self.fldquals = tuple(f[3] for f in flattened)

    def get_cached_btype(self, ffi, finishlist, can_delay=False):
        BType = StructOrUnionOrEnum.get_cached_btype(self, ffi, finishlist,
                                                     can_delay)
        if not can_delay:
            # complete the layout immediately unless delaying is allowed
            self.finish_backend_type(ffi, finishlist)
        return BType

    def finish_backend_type(self, ffi, finishlist):
        """Fill in the backend type's field layout (idempotent)."""
        if self.completed:
            if self.completed != 2:
                raise NotImplementedError("recursive structure declaration "
                                          "for '%s'" % (self.name,))
            return
        BType = ffi._cached_btypes[self]
        #
        self.completed = 1    # mark in-progress to detect recursion
        #
        if self.fldtypes is None:
            pass    # not completing it: it's an opaque struct
        #
        elif self.fixedlayout is None:
            # layout computed by the backend from the field types
            fldtypes = [tp.get_cached_btype(ffi, finishlist)
                        for tp in self.fldtypes]
            fields = list(zip(self.fldnames, fldtypes, self.fldbitsize))
            extra_flags = ()
            if self.packed:
                if self.packed == 1:
                    extra_flags = (8,)    # SF_PACKED
                else:
                    # a specific 'packed' alignment value
                    extra_flags = (0, self.packed)
            ffi._backend.complete_struct_or_union(BType, fields, self,
                                                  -1, -1, *extra_flags)
        #
        else:
            # layout measured by the C compiler: verify field sizes match
            fldtypes = []
            fieldofs, fieldsize, totalsize, totalalignment = self.fixedlayout
            for i in range(len(self.fldnames)):
                fsize = fieldsize[i]
                ftype = self.fldtypes[i]
                #
                if isinstance(ftype, ArrayType) and ftype.length_is_unknown():
                    # fix the length to match the total size
                    BItemType = ftype.item.get_cached_btype(ffi, finishlist)
                    nlen, nrest = divmod(fsize, ffi.sizeof(BItemType))
                    if nrest != 0:
                        self._verification_error(
                            "field '%s.%s' has a bogus size?" % (
                            self.name, self.fldnames[i] or '{}'))
                    ftype = ftype.resolve_length(nlen)
                    self.fldtypes = (self.fldtypes[:i] + (ftype,) +
                                     self.fldtypes[i+1:])
                #
                BFieldType = ftype.get_cached_btype(ffi, finishlist)
                if isinstance(ftype, ArrayType) and ftype.length is None:
                    # open-ended array field: the compiler reports size 0
                    assert fsize == 0
                else:
                    bitemsize = ffi.sizeof(BFieldType)
                    if bitemsize != fsize:
                        self._verification_error(
                            "field '%s.%s' is declared as %d bytes, but is "
                            "really %d bytes" % (self.name,
                                                 self.fldnames[i] or '{}',
                                                 bitemsize, fsize))
                fldtypes.append(BFieldType)
            #
            fields = list(zip(self.fldnames, fldtypes,
                              self.fldbitsize, fieldofs))
            ffi._backend.complete_struct_or_union(BType, fields, self,
                                                  totalsize, totalalignment)
        self.completed = 2

    def _verification_error(self, msg):
        raise VerificationError(msg)

    def check_not_partial(self):
        # a partial struct without a measured layout cannot be used yet
        if self.partial and self.fixedlayout is None:
            raise VerificationMissing(self._get_c_name())

    def build_backend_type(self, ffi, finishlist):
        self.check_not_partial()
        finishlist.append(self)
        #
        return global_cache(self, ffi, 'new_%s_type' % self.kind,
                            self.get_official_name(), key=self)
class StructType(StructOrUnion):
    """A C 'struct' type."""
    kind = 'struct'
class UnionType(StructOrUnion):
    """A C 'union' type."""
    kind = 'union'
class EnumType(StructOrUnionOrEnum):
    """A C 'enum' type, with parallel tuples of enumerator names and values."""
    kind = 'enum'
    partial = False
    partial_resolved = False

    def __init__(self, name, enumerators, enumvalues, baseinttype=None):
        self.name = name
        self.enumerators = enumerators
        self.enumvalues = enumvalues
        self.baseinttype = baseinttype
        self.build_c_name_with_marker()

    def force_the_name(self, forcename):
        StructOrUnionOrEnum.force_the_name(self, forcename)
        if self.forcename is None:
            # anonymous enum: synthesize a '$'-prefixed placeholder name
            name = self.get_official_name()
            self.forcename = '$' + name.replace(' ', '_')

    def check_not_partial(self):
        if self.partial and not self.partial_resolved:
            raise VerificationMissing(self._get_c_name())

    def build_backend_type(self, ffi, finishlist):
        self.check_not_partial()
        base_btype = self.build_baseinttype(ffi, finishlist)
        return global_cache(self, ffi, 'new_enum_type',
                            self.get_official_name(),
                            self.enumerators, self.enumvalues,
                            base_btype, key=self)

    def build_baseinttype(self, ffi, finishlist):
        """Pick the underlying integer type: the declared one if given,
        otherwise the smallest of (unsigned) int/long that holds every
        enumerator value."""
        if self.baseinttype is not None:
            return self.baseinttype.get_cached_btype(ffi, finishlist)
        #
        if self.enumvalues:
            smallest_value = min(self.enumvalues)
            largest_value = max(self.enumvalues)
        else:
            import warnings
            try:
                # XXX! The goal is to ensure that the warnings.warn()
                # will not suppress the warning. We want to get it
                # several times if we reach this point several times.
                __warningregistry__.clear()
            except NameError:
                pass
            warnings.warn("%r has no values explicitly defined; "
                          "guessing that it is equivalent to 'unsigned int'"
                          % self._get_c_name())
            smallest_value = largest_value = 0
        if smallest_value < 0:   # needs a signed type
            sign = 1
            candidates = (PrimitiveType("int"), PrimitiveType("long"))
        else:
            sign = 0
            candidates = (PrimitiveType("unsigned int"),
                          PrimitiveType("unsigned long"))
        # build both backend types up front (matching the original caching
        # behavior), then return the narrowest one that fits all values
        btypes = [c.get_cached_btype(ffi, finishlist) for c in candidates]
        for btype in btypes:
            nbits = 8 * ffi.sizeof(btype)
            if (smallest_value >= ((-1) << (nbits - 1)) and
                    largest_value < (1 << (nbits - sign))):
                return btype
        raise CDefError("%s values don't all fit into either 'long' "
                        "or 'unsigned long'" % self._get_c_name())
def unknown_type(name, structname=None):
    """Build an opaque placeholder StructType for an unknown type *name*,
    displayed under *name* itself rather than 'struct ...'."""
    if structname is None:
        structname = '$%s' % name
    tp = StructType(structname, None, None, None)
    tp.force_the_name(name)
    tp.origin = "unknown_type"    # marker for later introspection
    return tp
def unknown_ptr_type(name, structname=None):
    """Build a NamedPointerType to an opaque placeholder struct, for an
    unknown pointer typedef *name*."""
    if structname is None:
        structname = '$$%s' % name
    tp = StructType(structname, None, None, None)
    return NamedPointerType(tp, name)
global_lock = allocate_lock()
|
||||||
|
_typecache_cffi_backend = weakref.WeakValueDictionary()
|
||||||
|
|
||||||
|
def get_typecache(backend):
    """Return the WeakValueDictionary caching backend types.

    Returns _typecache_cffi_backend if *backend* is the _cffi_backend
    module, or type(backend).__typecache if *backend* is an instance of
    CTypesBackend (or some FakeBackend class during tests), creating it
    lazily under the global lock.
    """
    if isinstance(backend, types.ModuleType):
        return _typecache_cffi_backend
    cls = type(backend)
    with global_lock:
        if not hasattr(cls, '__typecache'):
            cls.__typecache = weakref.WeakValueDictionary()
    return cls.__typecache
def global_cache(srctype, ffi, funcname, *args, **kwds):
    """Build a backend type via ffi._backend.<funcname>(*args), caching
    the result in ffi._typecache.

    The cache key defaults to (funcname, args) and can be overridden with
    the 'key=' keyword.  *srctype* is only used in error messages.
    """
    key = kwds.pop('key', (funcname, args))
    assert not kwds
    try:
        return ffi._typecache[key]
    except KeyError:
        pass
    try:
        res = getattr(ffi._backend, funcname)(*args)
    except NotImplementedError as e:
        raise NotImplementedError("%s: %r: %s" % (funcname, srctype, e))
    # note that setdefault() on WeakValueDictionary is not atomic
    # and contains a rare bug (http://bugs.python.org/issue19542);
    # we have to use a lock and do it ourselves
    cache = ffi._typecache
    with global_lock:
        existing = cache.get(key)
        if existing is not None:
            return existing    # lost the race: keep the first one stored
        cache[key] = res
        return res
def pointer_cache(ffi, BType):
    """Return the cached backend pointer type to *BType*."""
    return global_cache('?', ffi, 'new_pointer_type', BType)
def attach_exception_info(e, name):
    """Prefix the first string argument of exception *e* with 'name: ',
    leaving non-string or empty args untouched."""
    if e.args:
        first = e.args[0]
        if type(first) is str:
            e.args = ('%s: %s' % (name, first),) + e.args[1:]
@ -0,0 +1,121 @@
|
||||||
|
# pkg-config, https://www.freedesktop.org/wiki/Software/pkg-config/ integration for cffi
|
||||||
|
import sys, os, subprocess
|
||||||
|
|
||||||
|
from .error import PkgConfigError
|
||||||
|
|
||||||
|
|
||||||
|
def merge_flags(cfg1, cfg2):
    """Merge values from cffi config flags *cfg2* into *cfg1*, in place.

    Every shared key must map to a list of strings; the lists from cfg2
    are appended to the corresponding lists in cfg1.  Returns cfg1.

    Example:
        merge_flags({"libraries": ["one"]}, {"libraries": ["two"]})
        {"libraries": ["one", "two"]}
    """
    for key, value in cfg2.items():
        if key not in cfg1:
            cfg1[key] = value
            continue
        if not isinstance(cfg1[key], list):
            raise TypeError("cfg1[%r] should be a list of strings" % (key,))
        if not isinstance(value, list):
            raise TypeError("cfg2[%r] should be a list of strings" % (key,))
        cfg1[key].extend(value)
    return cfg1
def call(libname, flag, encoding=None):
    """Run 'pkg-config --print-errors <flag> <libname>' and return its
    decoded standard output.

    *encoding* defaults to sys.getfilesystemencoding(), now resolved at
    call time; the previous signature evaluated it once at module import,
    freezing the value even if the interpreter's filesystem encoding was
    reconfigured afterwards.  Passing an explicit encoding behaves as
    before.

    Raises PkgConfigError if pkg-config cannot be executed, exits with a
    non-zero status, or produces output that cannot be decoded/handled.
    """
    if encoding is None:
        encoding = sys.getfilesystemencoding()
    a = ["pkg-config", "--print-errors", flag, libname]
    try:
        pc = subprocess.Popen(a, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    except EnvironmentError as e:
        raise PkgConfigError("cannot run pkg-config: %s" % (str(e).strip(),))

    bout, berr = pc.communicate()
    if pc.returncode != 0:
        try:
            berr = berr.decode(encoding)
        except Exception:
            pass  # report the raw bytes if stderr cannot be decoded
        raise PkgConfigError(berr.strip())

    if sys.version_info >= (3,) and not isinstance(bout, str):   # Python 3.x
        try:
            bout = bout.decode(encoding)
        except UnicodeDecodeError:
            raise PkgConfigError("pkg-config %s %s returned bytes that cannot "
                                 "be decoded with encoding %r:\n%r" %
                                 (flag, libname, encoding, bout))

    if os.altsep != '\\' and '\\' in bout:
        # NOTE(review): os.altsep is '/' on Windows and None elsewhere, so
        # this check fires on every platform; possibly os.sep was intended
        # upstream -- behavior kept as-is, confirm before changing.
        raise PkgConfigError("pkg-config %s %s returned an unsupported "
                             "backslash-escaped output:\n%r" %
                             (flag, libname, bout))
    return bout
def flags_from_pkgconfig(libs):
    r"""Return compiler line flags for FFI.set_source based on pkg-config output

    Usage
        ...
        ffibuilder.set_source("_foo", pkgconfig = ["libfoo", "libbar >= 1.8.3"])

    If pkg-config is installed on build machine, then arguments include_dirs,
    library_dirs, libraries, define_macros, extra_compile_args and
    extra_link_args are extended with an output of pkg-config for libfoo and
    libbar.

    Raises PkgConfigError in case the pkg-config call fails.
    """
    # (fixed: the nested kwargs() helper previously computed an unused
    # local 'fse = sys.getfilesystemencoding()'; it has been removed)

    def get_include_dirs(string):
        return [x[2:] for x in string.split() if x.startswith("-I")]

    def get_library_dirs(string):
        return [x[2:] for x in string.split() if x.startswith("-L")]

    def get_libraries(string):
        return [x[2:] for x in string.split() if x.startswith("-l")]

    # convert -Dfoo=bar to list of tuples [("foo", "bar")] expected by distutils
    def get_macros(string):
        def _macro(x):
            x = x[2:]    # drop "-D"
            if '=' in x:
                return tuple(x.split("=", 1))   # "-Dfoo=bar" => ("foo", "bar")
            else:
                return (x, None)                # "-Dfoo" => ("foo", None)
        return [_macro(x) for x in string.split() if x.startswith("-D")]

    def get_other_cflags(string):
        return [x for x in string.split() if not x.startswith("-I") and
                not x.startswith("-D")]

    def get_other_libs(string):
        return [x for x in string.split() if not x.startswith("-L") and
                not x.startswith("-l")]

    # return kwargs for given libname
    def kwargs(libname):
        all_cflags = call(libname, "--cflags")
        all_libs = call(libname, "--libs")
        return {
            "include_dirs": get_include_dirs(all_cflags),
            "library_dirs": get_library_dirs(all_libs),
            "libraries": get_libraries(all_libs),
            "define_macros": get_macros(all_cflags),
            "extra_compile_args": get_other_cflags(all_cflags),
            "extra_link_args": get_other_libs(all_libs),
        }

    # merge all arguments together
    ret = {}
    for libname in libs:
        lib_flags = kwargs(libname)
        merge_flags(ret, lib_flags)
    return ret
File diff suppressed because it is too large
Load Diff
|
@ -0,0 +1,219 @@
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
|
||||||
|
# Python 2/3 compatibility: 'basestring' exists only on Python 2; on
# Python 3 every text string is 'str'.
try:
    basestring
except NameError:
    # Python 3.x
    basestring = str
def error(msg):
    """Abort setup with a DistutilsSetupError carrying *msg*.

    The import is done lazily so this module can be imported even when
    distutils is not needed yet.
    """
    from distutils.errors import DistutilsSetupError
    raise DistutilsSetupError(msg)
def execfile(filename, glob):
    """Execute the Python source file *filename* with *glob* as globals.

    A re-implementation (for Python 3 too) of the Python 2 builtin.  We
    use it instead of __import__() to load the build script, because with
    a normal import some packages' intermediate __init__.py files may
    already try to import the very file we are generating.
    """
    with open(filename) as f:
        source = f.read() + '\n'    # trailing newline: Python 2.6 compatibility
    exec(compile(source, filename, 'exec'), glob, glob)
def add_cffi_module(dist, mod_spec):
    """Process one 'path/build.py:ffi_variable' entry of cffi_modules=...

    Loads the named build script, fetches the FFI object (calling it
    first if it turns out to be a factory function), validates that
    set_source() was called, and registers either a pure Python module
    or a C extension module on *dist*.
    """
    from cffi.api import FFI

    if not isinstance(mod_spec, basestring):
        error("argument to 'cffi_modules=...' must be a str or a list of str,"
              " not %r" % (type(mod_spec).__name__,))
    mod_spec = str(mod_spec)
    try:
        build_file_name, ffi_var_name = mod_spec.split(':')
    except ValueError:
        error("%r must be of the form 'path/build.py:ffi_variable'" %
              (mod_spec,))
    if not os.path.exists(build_file_name):
        # the user may have written a dotted module path by mistake;
        # suggest the corresponding file path if it exists
        hint = ''
        candidate = build_file_name.replace('.', '/') + '.py'
        if os.path.exists(candidate):
            hint = ' (rewrite cffi_modules to [%r])' % (
                candidate + ':' + ffi_var_name,)
        error("%r does not name an existing file%s" % (build_file_name, hint))

    mod_vars = {'__name__': '__cffi__', '__file__': build_file_name}
    execfile(build_file_name, mod_vars)

    try:
        ffi = mod_vars[ffi_var_name]
    except KeyError:
        error("%r: object %r not found in module" % (mod_spec,
                                                     ffi_var_name))
    if not isinstance(ffi, FFI):
        ffi = ffi()    # maybe it's a function instead of directly an ffi
    if not isinstance(ffi, FFI):
        error("%r is not an FFI instance (got %r)" % (mod_spec,
                                                      type(ffi).__name__))
    if not hasattr(ffi, '_assigned_source'):
        error("%r: the set_source() method was not called" % (mod_spec,))
    module_name, source, source_extension, kwds = ffi._assigned_source
    if ffi._windows_unicode:
        # must not mutate the original kwds stored on the ffi
        kwds = kwds.copy()
        ffi._apply_windows_unicode(kwds)

    if source is None:
        _add_py_module(dist, ffi, module_name)
    else:
        _add_c_module(dist, ffi, module_name, source, source_extension, kwds)
def _set_py_limited_api(Extension, kwds):
    """
    Add py_limited_api to kwds if setuptools >= 26 is in use.
    Do not alter the setting if it already exists.
    Setuptools takes care of ignoring the flag on Python 2 and PyPy.

    CPython itself should ignore the flag in a debugging version
    (by not listing .abi3.so in the extensions it supports), but
    it doesn't so far, creating troubles.  That's why we check
    for "not hasattr(sys, 'gettotalrefcount')" (the 2.7 compatible equivalent
    of 'd' not in sys.abiflags).  (http://bugs.python.org/issue28401)

    On Windows, with CPython <= 3.4, it's better not to use py_limited_api
    because virtualenv *still* doesn't copy PYTHON3.DLL on these versions.
    Recently (2020) we started shipping only >= 3.5 wheels, though.  So
    we'll give it another try and set py_limited_api on Windows >= 3.5.
    """
    from cffi import recompiler

    # Guard clauses: respect an explicit user setting, skip debug builds
    # of CPython, and skip entirely when the recompiler opted out.
    if 'py_limited_api' in kwds:
        return kwds
    if hasattr(sys, 'gettotalrefcount'):
        return kwds
    if not recompiler.USE_LIMITED_API:
        return kwds

    import setuptools
    try:
        major = int(setuptools.__version__.partition('.')[0])
    except ValueError:
        # Certain development versions of setuptools have no parsable
        # version number.  Try setting 'py_limited_api' anyway; at worst
        # we get a warning.
        kwds['py_limited_api'] = True
    else:
        if major >= 26:
            kwds['py_limited_api'] = True
    return kwds
|
||||||
|
|
||||||
|
def _add_c_module(dist, ffi, module_name, source, source_extension, kwds):
    """Register a cffi-generated C extension module on *dist*.

    A placeholder source name is appended now; the real C file is only
    generated when the (subclassed) 'build_ext' command actually runs.
    """
    from distutils.core import Extension
    # We are a setuptools extension. Need this build_ext for py_limited_api.
    from setuptools.command.build_ext import build_ext
    from distutils.dir_util import mkpath
    from distutils import log
    from cffi import recompiler

    # '$PLACEHOLDER' stands for the not-yet-generated C source; it is
    # replaced with the real path inside build_ext_make_mod.run() below.
    allsources = ['$PLACEHOLDER']
    allsources.extend(kwds.pop('sources', []))
    kwds = _set_py_limited_api(Extension, kwds)
    ext = Extension(name=module_name, sources=allsources, **kwds)

    def make_mod(tmpdir, pre_run=None):
        # Generate the C source for 'ffi' into tmpdir and return its path.
        c_file = os.path.join(tmpdir, module_name + source_extension)
        log.info("generating cffi module %r" % c_file)
        mkpath(tmpdir)
        # a setuptools-only, API-only hook: called with the "ext" and "ffi"
        # arguments just before we turn the ffi into C code.  To use it,
        # subclass the 'distutils.command.build_ext.build_ext' class and
        # add a method 'def pre_run(self, ext, ffi)'.
        if pre_run is not None:
            pre_run(ext, ffi)
        updated = recompiler.make_c_source(ffi, module_name, source, c_file)
        if not updated:
            log.info("already up-to-date")
        return c_file

    if dist.ext_modules is None:
        dist.ext_modules = []
    dist.ext_modules.append(ext)

    # Chain off whatever 'build_ext' class is already installed, so that
    # user-provided cmdclass customizations are preserved.
    base_class = dist.cmdclass.get('build_ext', build_ext)
    class build_ext_make_mod(base_class):
        def run(self):
            if ext.sources[0] == '$PLACEHOLDER':
                pre_run = getattr(self, 'pre_run', None)
                ext.sources[0] = make_mod(self.build_temp, pre_run)
            base_class.run(self)
    dist.cmdclass['build_ext'] = build_ext_make_mod
    # NB. multiple runs here will create multiple 'build_ext_make_mod'
    # classes.  Even in this case the 'build_ext' command should be
    # run once; but just in case, the logic above does nothing if
    # called again.
|
||||||
|
|
||||||
|
|
||||||
|
def _add_py_module(dist, ffi, module_name):
    """Register a cffi-generated pure-Python (ABI mode) module on *dist*.

    Hooks both 'build_py' (normal builds) and 'build_ext -i' (in-place
    builds) so the .py module is generated into the right directory.
    """
    from distutils.dir_util import mkpath
    from setuptools.command.build_py import build_py
    from setuptools.command.build_ext import build_ext
    from distutils import log
    from cffi import recompiler

    def generate_mod(py_file):
        # Write (or refresh) the generated .py module at 'py_file'.
        log.info("generating cffi module %r" % py_file)
        mkpath(os.path.dirname(py_file))
        updated = recompiler.make_py_source(ffi, module_name, py_file)
        if not updated:
            log.info("already up-to-date")

    # Chain off any user-installed 'build_py' class.
    base_class = dist.cmdclass.get('build_py', build_py)
    class build_py_make_mod(base_class):
        def run(self):
            base_class.run(self)
            module_path = module_name.split('.')
            module_path[-1] += '.py'
            generate_mod(os.path.join(self.build_lib, *module_path))
        def get_source_files(self):
            # This is called from 'setup.py sdist' only.  Exclude
            # the generate .py module in this case.
            saved_py_modules = self.py_modules
            try:
                if saved_py_modules:
                    self.py_modules = [m for m in saved_py_modules
                                         if m != module_name]
                return base_class.get_source_files(self)
            finally:
                self.py_modules = saved_py_modules
    dist.cmdclass['build_py'] = build_py_make_mod

    # distutils and setuptools have no notion I could find of a
    # generated python module.  If we don't add module_name to
    # dist.py_modules, then things mostly work but there are some
    # combination of options (--root and --record) that will miss
    # the module.  So we add it here, which gives a few apparently
    # harmless warnings about not finding the file outside the
    # build directory.
    # Then we need to hack more in get_source_files(); see above.
    if dist.py_modules is None:
        dist.py_modules = []
    dist.py_modules.append(module_name)

    # the following is only for "build_ext -i"
    base_class_2 = dist.cmdclass.get('build_ext', build_ext)
    class build_ext_make_mod(base_class_2):
        def run(self):
            base_class_2.run(self)
            if self.inplace:
                # from get_ext_fullpath() in distutils/command/build_ext.py
                module_path = module_name.split('.')
                package = '.'.join(module_path[:-1])
                build_py = self.get_finalized_command('build_py')
                package_dir = build_py.get_package_dir(package)
                file_name = module_path[-1] + '.py'
                generate_mod(os.path.join(package_dir, file_name))
    dist.cmdclass['build_ext'] = build_ext_make_mod
|
||||||
|
|
||||||
|
def cffi_modules(dist, attr, value):
    """Entry point for the setuptools 'cffi_modules' keyword.

    *value* is either a single 'path/build.py:ffi_var' string or a list
    of such strings; each one is handed to add_cffi_module().
    """
    assert attr == 'cffi_modules'
    specs = [value] if isinstance(value, basestring) else value
    for spec in specs:
        add_cffi_module(dist, spec)
|
File diff suppressed because it is too large
Load Diff
|
@ -0,0 +1,675 @@
|
||||||
|
#
|
||||||
|
# DEPRECATED: implementation for ffi.verify()
|
||||||
|
#
|
||||||
|
import sys, os
|
||||||
|
import types
|
||||||
|
|
||||||
|
from . import model
|
||||||
|
from .error import VerificationError
|
||||||
|
|
||||||
|
|
||||||
|
class VGenericEngine(object):
    """Generic (non-CPython-extension) engine for the deprecated ffi.verify().

    Emits plain exported C functions (no Python C-API), compiles them into
    a shared library, and loads it back through the cffi backend's
    load_library().  Works on any Python implementation, unlike the
    CPython-specific engine.
    """
    _class_key = 'g'
    # this engine produces a plain shared library, not a Python module
    _gen_python_module = False

    def __init__(self, verifier):
        self.verifier = verifier
        self.ffi = verifier.ffi
        # names to pass to the linker as exported symbols (see
        # patch_extension_kwds); filled while generating the C source
        self.export_symbols = []
        # non-partial structs whose layout must be checked after loading
        self._struct_pending_verification = {}

    def patch_extension_kwds(self, kwds):
        # add 'export_symbols' to the dictionary.  Note that we add the
        # list before filling it.  When we fill it, it will thus also show
        # up in kwds['export_symbols'].
        kwds.setdefault('export_symbols', self.export_symbols)

    def find_module(self, module_name, path, so_suffixes):
        """Return the path of the compiled library, or None if not found."""
        for so_suffix in so_suffixes:
            basename = module_name + so_suffix
            if path is None:
                path = sys.path
            for dirname in path:
                filename = os.path.join(dirname, basename)
                if os.path.isfile(filename):
                    return filename

    def collect_types(self):
        pass      # not needed in the generic engine

    def _prnt(self, what=''):
        # write one line of generated C source to self._f
        # (self._f is set by the caller of write_source_to_f; it is not
        # assigned in this class -- TODO confirm against the verifier code)
        self._f.write(what + '\n')

    def write_source_to_f(self):
        """Write the complete generated C source to self._f."""
        prnt = self._prnt
        # first paste some standard set of lines that are mostly '#include'
        prnt(cffimod_header)
        # then paste the C source given by the user, verbatim.
        prnt(self.verifier.preamble)
        #
        # call generate_gen_xxx_decl(), for every xxx found from
        # ffi._parser._declarations.  This generates all the functions.
        self._generate('decl')
        #
        # on Windows, distutils insists on putting init_cffi_xyz in
        # 'export_symbols', so instead of fighting it, just give up and
        # give it one
        if sys.platform == 'win32':
            if sys.version_info >= (3,):
                prefix = 'PyInit_'
            else:
                prefix = 'init'
            modname = self.verifier.get_module_name()
            prnt("void %s%s(void) { }\n" % (prefix, modname))

    def load_library(self, flags=0):
        """Load the compiled library and return the FFILibrary object."""
        # import it with the CFFI backend
        backend = self.ffi._backend
        # needs to make a path that contains '/', on Posix
        filename = os.path.join(os.curdir, self.verifier.modulefilename)
        module = backend.load_library(filename, flags)
        #
        # call loading_gen_struct() to get the struct layout inferred by
        # the C compiler
        self._load(module, 'loading')

        # build the FFILibrary class and instance, this is a module subclass
        # because modules are expected to have usually-constant-attributes and
        # in PyPy this means the JIT is able to treat attributes as constant,
        # which we want.
        class FFILibrary(types.ModuleType):
            _cffi_generic_module = module
            _cffi_ffi = self.ffi
            _cffi_dir = []
            def __dir__(self):
                return FFILibrary._cffi_dir
        library = FFILibrary("")
        #
        # finally, call the loaded_gen_xxx() functions.  This will set
        # up the 'library' object.
        self._load(module, 'loaded', library=library)
        return library

    def _get_declarations(self):
        # sorted (key, type) pairs from the parser's declarations
        lst = [(key, tp) for (key, (tp, qual)) in
                self.ffi._parser._declarations.items()]
        lst.sort()
        return lst

    def _generate(self, step_name):
        # dispatch each declaration to '_generate_gen_<kind>_<step_name>'
        for name, tp in self._get_declarations():
            kind, realname = name.split(' ', 1)
            try:
                method = getattr(self, '_generate_gen_%s_%s' % (kind,
                                                                step_name))
            except AttributeError:
                raise VerificationError(
                    "not implemented in verify(): %r" % name)
            try:
                method(tp, realname)
            except Exception as e:
                model.attach_exception_info(e, name)
                raise

    def _load(self, module, step_name, **kwds):
        # dispatch each declaration to '_<step_name>_gen_<kind>'
        for name, tp in self._get_declarations():
            kind, realname = name.split(' ', 1)
            method = getattr(self, '_%s_gen_%s' % (step_name, kind))
            try:
                method(tp, realname, module, **kwds)
            except Exception as e:
                model.attach_exception_info(e, name)
                raise

    def _generate_nothing(self, tp, name):
        pass

    def _loaded_noop(self, tp, name, module, **kwds):
        pass

    # ----------
    # typedefs: generates no code so far

    _generate_gen_typedef_decl   = _generate_nothing
    _loading_gen_typedef         = _loaded_noop
    _loaded_gen_typedef          = _loaded_noop

    # ----------
    # function declarations

    def _generate_gen_function_decl(self, tp, name):
        assert isinstance(tp, model.FunctionPtrType)
        if tp.ellipsis:
            # cannot support vararg functions better than this: check for its
            # exact type (including the fixed arguments), and build it as a
            # constant function pointer (no _cffi_f_%s wrapper)
            self._generate_gen_const(False, name, tp)
            return
        prnt = self._prnt
        numargs = len(tp.args)
        argnames = []
        for i, type in enumerate(tp.args):
            indirection = ''
            if isinstance(type, model.StructOrUnion):
                # struct arguments are passed by pointer in the wrapper
                indirection = '*'
            argnames.append('%sx%d' % (indirection, i))
        context = 'argument of %s' % name
        arglist = [type.get_c_name(' %s' % arg, context)
                   for type, arg in zip(tp.args, argnames)]
        tpresult = tp.result
        if isinstance(tpresult, model.StructOrUnion):
            # struct results are returned through an extra '*r' out-param
            arglist.insert(0, tpresult.get_c_name(' *r', context))
            tpresult = model.void_type
        arglist = ', '.join(arglist) or 'void'
        wrappername = '_cffi_f_%s' % name
        self.export_symbols.append(wrappername)
        if tp.abi:
            abi = tp.abi + ' '
        else:
            abi = ''
        funcdecl = ' %s%s(%s)' % (abi, wrappername, arglist)
        context = 'result of %s' % name
        prnt(tpresult.get_c_name(funcdecl, context))
        prnt('{')
        #
        if isinstance(tp.result, model.StructOrUnion):
            result_code = '*r = '
        elif not isinstance(tp.result, model.VoidType):
            result_code = 'return '
        else:
            result_code = ''
        prnt('  %s%s(%s);' % (result_code, name, ', '.join(argnames)))
        prnt('}')
        prnt()

    _loading_gen_function = _loaded_noop

    def _loaded_gen_function(self, tp, name, module, library):
        assert isinstance(tp, model.FunctionPtrType)
        if tp.ellipsis:
            newfunction = self._load_constant(False, tp, name, module)
        else:
            indirections = []
            base_tp = tp
            # mirror the indirection introduced in the generated wrapper:
            # struct args/results become pointers at the C level
            if (any(isinstance(typ, model.StructOrUnion) for typ in tp.args)
                    or isinstance(tp.result, model.StructOrUnion)):
                indirect_args = []
                for i, typ in enumerate(tp.args):
                    if isinstance(typ, model.StructOrUnion):
                        typ = model.PointerType(typ)
                        indirections.append((i, typ))
                    indirect_args.append(typ)
                indirect_result = tp.result
                if isinstance(indirect_result, model.StructOrUnion):
                    if indirect_result.fldtypes is None:
                        raise TypeError("'%s' is used as result type, "
                                        "but is opaque" % (
                                            indirect_result._get_c_name(),))
                    indirect_result = model.PointerType(indirect_result)
                    indirect_args.insert(0, indirect_result)
                    indirections.insert(0, ("result", indirect_result))
                    indirect_result = model.void_type
                tp = model.FunctionPtrType(tuple(indirect_args),
                                           indirect_result, tp.ellipsis)
            BFunc = self.ffi._get_cached_btype(tp)
            wrappername = '_cffi_f_%s' % name
            newfunction = module.load_function(BFunc, wrappername)
            for i, typ in indirections:
                newfunction = self._make_struct_wrapper(newfunction, i, typ,
                                                        base_tp)
        setattr(library, name, newfunction)
        type(library)._cffi_dir.append(name)

    def _make_struct_wrapper(self, oldfunc, i, tp, base_tp):
        # Python-level wrapper undoing the pointer indirection for
        # argument index 'i' (or the "result" out-parameter).
        backend = self.ffi._backend
        BType = self.ffi._get_cached_btype(tp)
        if i == "result":
            ffi = self.ffi
            def newfunc(*args):
                res = ffi.new(BType)
                oldfunc(res, *args)
                return res[0]
        else:
            def newfunc(*args):
                args = args[:i] + (backend.newp(BType, args[i]),) + args[i+1:]
                return oldfunc(*args)
        newfunc._cffi_base_type = base_tp
        return newfunc

    # ----------
    # named structs

    def _generate_gen_struct_decl(self, tp, name):
        assert name == tp.name
        self._generate_struct_or_union_decl(tp, 'struct', name)

    def _loading_gen_struct(self, tp, name, module):
        self._loading_struct_or_union(tp, 'struct', name, module)

    def _loaded_gen_struct(self, tp, name, module, **kwds):
        self._loaded_struct_or_union(tp)

    def _generate_gen_union_decl(self, tp, name):
        assert name == tp.name
        self._generate_struct_or_union_decl(tp, 'union', name)

    def _loading_gen_union(self, tp, name, module):
        self._loading_struct_or_union(tp, 'union', name, module)

    def _loaded_gen_union(self, tp, name, module, **kwds):
        self._loaded_struct_or_union(tp)

    def _generate_struct_or_union_decl(self, tp, prefix, name):
        # Emit a compile-time field-type check function and a runtime
        # '_cffi_layout_*' function returning sizes/offsets.
        if tp.fldnames is None:
            return     # nothing to do with opaque structs
        checkfuncname = '_cffi_check_%s_%s' % (prefix, name)
        layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name)
        cname = ('%s %s' % (prefix, name)).strip()
        #
        prnt = self._prnt
        prnt('static void %s(%s *p)' % (checkfuncname, cname))
        prnt('{')
        prnt('  /* only to generate compile-time warnings or errors */')
        prnt('  (void)p;')
        for fname, ftype, fbitsize, fqual in tp.enumfields():
            if (isinstance(ftype, model.PrimitiveType)
                and ftype.is_integer_type()) or fbitsize >= 0:
                # accept all integers, but complain on float or double
                prnt('  (void)((p->%s) << 1);' % fname)
            else:
                # only accept exactly the type declared.
                try:
                    prnt('  { %s = &p->%s; (void)tmp; }' % (
                        ftype.get_c_name('*tmp', 'field %r'%fname, quals=fqual),
                        fname))
                except VerificationError as e:
                    prnt('  /* %s */' % str(e))   # cannot verify it, ignore
        prnt('}')
        self.export_symbols.append(layoutfuncname)
        prnt('intptr_t %s(intptr_t i)' % (layoutfuncname,))
        prnt('{')
        prnt('  struct _cffi_aligncheck { char x; %s y; };' % cname)
        prnt('  static intptr_t nums[] = {')
        prnt('    sizeof(%s),' % cname)
        prnt('    offsetof(struct _cffi_aligncheck, y),')
        for fname, ftype, fbitsize, fqual in tp.enumfields():
            if fbitsize >= 0:
                continue      # xxx ignore fbitsize for now
            prnt('    offsetof(%s, %s),' % (cname, fname))
            if isinstance(ftype, model.ArrayType) and ftype.length is None:
                prnt('    0,  /* %s */' % ftype._get_c_name())
            else:
                prnt('    sizeof(((%s *)0)->%s),' % (cname, fname))
        prnt('    -1')
        prnt('  };')
        prnt('  return nums[i];')
        prnt('  /* the next line is not executed, but compiled */')
        prnt('  %s(0);' % (checkfuncname,))
        prnt('}')
        prnt()

    def _loading_struct_or_union(self, tp, prefix, name, module):
        # Call the generated layout function repeatedly (until it returns
        # a negative value) to recover sizes and offsets from the compiler.
        if tp.fldnames is None:
            return     # nothing to do with opaque structs
        layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name)
        #
        BFunc = self.ffi._typeof_locked("intptr_t(*)(intptr_t)")[0]
        function = module.load_function(BFunc, layoutfuncname)
        layout = []
        num = 0
        while True:
            x = function(num)
            if x < 0: break
            layout.append(x)
            num += 1
        if isinstance(tp, model.StructOrUnion) and tp.partial:
            # use the function()'s sizes and offsets to guide the
            # layout of the struct
            totalsize = layout[0]
            totalalignment = layout[1]
            fieldofs = layout[2::2]
            fieldsize = layout[3::2]
            tp.force_flatten()
            assert len(fieldofs) == len(fieldsize) == len(tp.fldnames)
            tp.fixedlayout = fieldofs, fieldsize, totalsize, totalalignment
        else:
            cname = ('%s %s' % (prefix, name)).strip()
            self._struct_pending_verification[tp] = layout, cname

    def _loaded_struct_or_union(self, tp):
        if tp.fldnames is None:
            return     # nothing to do with opaque structs
        self.ffi._get_cached_btype(tp)   # force 'fixedlayout' to be considered

        if tp in self._struct_pending_verification:
            # check that the layout sizes and offsets match the real ones
            def check(realvalue, expectedvalue, msg):
                if realvalue != expectedvalue:
                    raise VerificationError(
                        "%s (we have %d, but C compiler says %d)"
                        % (msg, expectedvalue, realvalue))
            ffi = self.ffi
            BStruct = ffi._get_cached_btype(tp)
            layout, cname = self._struct_pending_verification.pop(tp)
            check(layout[0], ffi.sizeof(BStruct), "wrong total size")
            check(layout[1], ffi.alignof(BStruct), "wrong total alignment")
            i = 2
            for fname, ftype, fbitsize, fqual in tp.enumfields():
                if fbitsize >= 0:
                    continue        # xxx ignore fbitsize for now
                check(layout[i], ffi.offsetof(BStruct, fname),
                      "wrong offset for field %r" % (fname,))
                if layout[i+1] != 0:
                    BField = ffi._get_cached_btype(ftype)
                    check(layout[i+1], ffi.sizeof(BField),
                          "wrong size for field %r" % (fname,))
                i += 2
            assert i == len(layout)

    # ----------
    # 'anonymous' declarations.  These are produced for anonymous structs
    # or unions; the 'name' is obtained by a typedef.

    def _generate_gen_anonymous_decl(self, tp, name):
        if isinstance(tp, model.EnumType):
            self._generate_gen_enum_decl(tp, name, '')
        else:
            self._generate_struct_or_union_decl(tp, '', name)

    def _loading_gen_anonymous(self, tp, name, module):
        if isinstance(tp, model.EnumType):
            self._loading_gen_enum(tp, name, module, '')
        else:
            self._loading_struct_or_union(tp, '', name, module)

    def _loaded_gen_anonymous(self, tp, name, module, **kwds):
        if isinstance(tp, model.EnumType):
            self._loaded_gen_enum(tp, name, module, **kwds)
        else:
            self._loaded_struct_or_union(tp)

    # ----------
    # constants, likely declared with '#define'

    def _generate_gen_const(self, is_int, name, tp=None, category='const',
                            check_value=None):
        prnt = self._prnt
        funcname = '_cffi_%s_%s' % (category, name)
        self.export_symbols.append(funcname)
        if check_value is not None:
            # verify a known integer value at load time
            assert is_int
            assert category == 'const'
            prnt('int %s(char *out_error)' % funcname)
            prnt('{')
            self._check_int_constant_value(name, check_value)
            prnt('  return 0;')
            prnt('}')
        elif is_int:
            # fetch an unknown integer value; the return value flags
            # whether it is negative (sign-extension handling)
            assert category == 'const'
            prnt('int %s(long long *out_value)' % funcname)
            prnt('{')
            prnt('  *out_value = (long long)(%s);' % (name,))
            prnt('  return (%s) <= 0;' % (name,))
            prnt('}')
        else:
            assert tp is not None
            assert check_value is None
            if category == 'var':
                ampersand = '&'
            else:
                ampersand = ''
            extra = ''
            if category == 'const' and isinstance(tp, model.StructOrUnion):
                extra = 'const *'
                ampersand = '&'
            prnt(tp.get_c_name(' %s%s(void)' % (extra, funcname), name))
            prnt('{')
            prnt('  return (%s%s);' % (ampersand, name))
            prnt('}')
        prnt()

    def _generate_gen_constant_decl(self, tp, name):
        is_int = isinstance(tp, model.PrimitiveType) and tp.is_integer_type()
        self._generate_gen_const(is_int, name, tp)

    _loading_gen_constant = _loaded_noop

    def _load_constant(self, is_int, tp, name, module, check_value=None):
        funcname = '_cffi_const_%s' % name
        if check_value is not None:
            assert is_int
            self._load_known_int_constant(module, funcname)
            value = check_value
        elif is_int:
            BType = self.ffi._typeof_locked("long long*")[0]
            BFunc = self.ffi._typeof_locked("int(*)(long long*)")[0]
            function = module.load_function(BFunc, funcname)
            p = self.ffi.new(BType)
            negative = function(p)
            value = int(p[0])
            if value < 0 and not negative:
                # value >= 2**63: undo the sign-extension of 'long long'
                BLongLong = self.ffi._typeof_locked("long long")[0]
                value += (1 << (8*self.ffi.sizeof(BLongLong)))
        else:
            assert check_value is None
            fntypeextra = '(*)(void)'
            if isinstance(tp, model.StructOrUnion):
                fntypeextra = '*' + fntypeextra
            BFunc = self.ffi._typeof_locked(tp.get_c_name(fntypeextra,
                                                          name))[0]
            function = module.load_function(BFunc, funcname)
            value = function()
            if isinstance(tp, model.StructOrUnion):
                value = value[0]
        return value

    def _loaded_gen_constant(self, tp, name, module, library):
        is_int = isinstance(tp, model.PrimitiveType) and tp.is_integer_type()
        value = self._load_constant(is_int, tp, name, module)
        setattr(library, name, value)
        type(library)._cffi_dir.append(name)

    # ----------
    # enums

    def _check_int_constant_value(self, name, value):
        # emit C code that verifies '<name> == value' and writes a
        # message into 'out_error' on mismatch
        prnt = self._prnt
        if value <= 0:
            prnt('  if ((%s) > 0 || (long)(%s) != %dL) {' % (
                name, name, value))
        else:
            prnt('  if ((%s) <= 0 || (unsigned long)(%s) != %dUL) {' % (
                name, name, value))
        prnt('    char buf[64];')
        prnt('    if ((%s) <= 0)' % name)
        prnt('        sprintf(buf, "%%ld", (long)(%s));' % name)
        prnt('    else')
        prnt('        sprintf(buf, "%%lu", (unsigned long)(%s));' %
             name)
        prnt('    sprintf(out_error, "%s has the real value %s, not %s",')
        prnt('            "%s", buf, "%d");' % (name[:100], value))
        prnt('    return -1;')
        prnt('  }')

    def _load_known_int_constant(self, module, funcname):
        BType = self.ffi._typeof_locked("char[]")[0]
        BFunc = self.ffi._typeof_locked("int(*)(char*)")[0]
        function = module.load_function(BFunc, funcname)
        p = self.ffi.new(BType, 256)
        if function(p) < 0:
            error = self.ffi.string(p)
            if sys.version_info >= (3,):
                error = str(error, 'utf-8')
            raise VerificationError(error)

    def _enum_funcname(self, prefix, name):
        # "$enum_$1" => "___D_enum____D_1"
        name = name.replace('$', '___D_')
        return '_cffi_e_%s_%s' % (prefix, name)

    def _generate_gen_enum_decl(self, tp, name, prefix='enum'):
        if tp.partial:
            # partial enum: load each enumerator as an unknown int constant
            for enumerator in tp.enumerators:
                self._generate_gen_const(True, enumerator)
            return
        #
        funcname = self._enum_funcname(prefix, name)
        self.export_symbols.append(funcname)
        prnt = self._prnt
        prnt('int %s(char *out_error)' % funcname)
        prnt('{')
        for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues):
            self._check_int_constant_value(enumerator, enumvalue)
        prnt('  return 0;')
        prnt('}')
        prnt()

    def _loading_gen_enum(self, tp, name, module, prefix='enum'):
        if tp.partial:
            enumvalues = [self._load_constant(True, tp, enumerator, module)
                          for enumerator in tp.enumerators]
            tp.enumvalues = tuple(enumvalues)
            tp.partial_resolved = True
        else:
            funcname = self._enum_funcname(prefix, name)
            self._load_known_int_constant(module, funcname)

    def _loaded_gen_enum(self, tp, name, module, library):
        for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues):
            setattr(library, enumerator, enumvalue)
            type(library)._cffi_dir.append(enumerator)

    # ----------
    # macros: for now only for integers

    def _generate_gen_macro_decl(self, tp, name):
        if tp == '...':
            check_value = None
        else:
            check_value = tp     # an integer
        self._generate_gen_const(True, name, check_value=check_value)

    _loading_gen_macro = _loaded_noop

    def _loaded_gen_macro(self, tp, name, module, library):
        if tp == '...':
            check_value = None
        else:
            check_value = tp     # an integer
        value = self._load_constant(True, tp, name, module,
                                    check_value=check_value)
        setattr(library, name, value)
        type(library)._cffi_dir.append(name)

    # ----------
    # global variables

    def _generate_gen_variable_decl(self, tp, name):
        if isinstance(tp, model.ArrayType):
            if tp.length_is_unknown():
                # emit a sizeof() helper so the true length can be
                # computed at load time
                prnt = self._prnt
                funcname = '_cffi_sizeof_%s' % (name,)
                self.export_symbols.append(funcname)
                prnt("size_t %s(void)" % funcname)
                prnt("{")
                prnt("  return sizeof(%s);" % (name,))
                prnt("}")
            tp_ptr = model.PointerType(tp.item)
            self._generate_gen_const(False, name, tp_ptr)
        else:
            tp_ptr = model.PointerType(tp)
            self._generate_gen_const(False, name, tp_ptr, category='var')

    _loading_gen_variable = _loaded_noop

    def _loaded_gen_variable(self, tp, name, module, library):
        if isinstance(tp, model.ArrayType):   # int a[5] is "constant" in the
                                              # sense that "a=..." is forbidden
            if tp.length_is_unknown():
                funcname = '_cffi_sizeof_%s' % (name,)
                BFunc = self.ffi._typeof_locked('size_t(*)(void)')[0]
                function = module.load_function(BFunc, funcname)
                size = function()
                BItemType = self.ffi._get_cached_btype(tp.item)
                length, rest = divmod(size, self.ffi.sizeof(BItemType))
                if rest != 0:
                    raise VerificationError(
                        "bad size: %r does not seem to be an array of %s" %
                        (name, tp.item))
                tp = tp.resolve_length(length)
            tp_ptr = model.PointerType(tp.item)
            value = self._load_constant(False, tp_ptr, name, module)
            # 'value' is a <cdata 'type *'> which we have to replace with
            # a <cdata 'type[N]'> if the N is actually known
            if tp.length is not None:
                BArray = self.ffi._get_cached_btype(tp)
                value = self.ffi.cast(BArray, value)
            setattr(library, name, value)
            type(library)._cffi_dir.append(name)
            return
        # remove ptr=<cdata 'int *'> from the library instance, and replace
        # it by a property on the class, which reads/writes into ptr[0].
        funcname = '_cffi_var_%s' % name
        BFunc = self.ffi._typeof_locked(tp.get_c_name('*(*)(void)', name))[0]
        function = module.load_function(BFunc, funcname)
        ptr = function()
        def getter(library):
            return ptr[0]
        def setter(library, value):
            ptr[0] = value
        setattr(type(library), name, property(getter, setter))
        type(library)._cffi_dir.append(name)
|
||||||
|
|
||||||
|
cffimod_header = r'''
|
||||||
|
#include <stdio.h>
|
||||||
|
#include <stddef.h>
|
||||||
|
#include <stdarg.h>
|
||||||
|
#include <errno.h>
|
||||||
|
#include <sys/types.h> /* XXX for ssize_t on some platforms */
|
||||||
|
|
||||||
|
/* this block of #ifs should be kept exactly identical between
|
||||||
|
c/_cffi_backend.c, cffi/vengine_cpy.py, cffi/vengine_gen.py
|
||||||
|
and cffi/_cffi_include.h */
|
||||||
|
#if defined(_MSC_VER)
|
||||||
|
# include <malloc.h> /* for alloca() */
|
||||||
|
# if _MSC_VER < 1600 /* MSVC < 2010 */
|
||||||
|
typedef __int8 int8_t;
|
||||||
|
typedef __int16 int16_t;
|
||||||
|
typedef __int32 int32_t;
|
||||||
|
typedef __int64 int64_t;
|
||||||
|
typedef unsigned __int8 uint8_t;
|
||||||
|
typedef unsigned __int16 uint16_t;
|
||||||
|
typedef unsigned __int32 uint32_t;
|
||||||
|
typedef unsigned __int64 uint64_t;
|
||||||
|
typedef __int8 int_least8_t;
|
||||||
|
typedef __int16 int_least16_t;
|
||||||
|
typedef __int32 int_least32_t;
|
||||||
|
typedef __int64 int_least64_t;
|
||||||
|
typedef unsigned __int8 uint_least8_t;
|
||||||
|
typedef unsigned __int16 uint_least16_t;
|
||||||
|
typedef unsigned __int32 uint_least32_t;
|
||||||
|
typedef unsigned __int64 uint_least64_t;
|
||||||
|
typedef __int8 int_fast8_t;
|
||||||
|
typedef __int16 int_fast16_t;
|
||||||
|
typedef __int32 int_fast32_t;
|
||||||
|
typedef __int64 int_fast64_t;
|
||||||
|
typedef unsigned __int8 uint_fast8_t;
|
||||||
|
typedef unsigned __int16 uint_fast16_t;
|
||||||
|
typedef unsigned __int32 uint_fast32_t;
|
||||||
|
typedef unsigned __int64 uint_fast64_t;
|
||||||
|
typedef __int64 intmax_t;
|
||||||
|
typedef unsigned __int64 uintmax_t;
|
||||||
|
# else
|
||||||
|
# include <stdint.h>
|
||||||
|
# endif
|
||||||
|
# if _MSC_VER < 1800 /* MSVC < 2013 */
|
||||||
|
# ifndef __cplusplus
|
||||||
|
typedef unsigned char _Bool;
|
||||||
|
# endif
|
||||||
|
# endif
|
||||||
|
#else
|
||||||
|
# include <stdint.h>
|
||||||
|
# if (defined (__SVR4) && defined (__sun)) || defined(_AIX) || defined(__hpux)
|
||||||
|
# include <alloca.h>
|
||||||
|
# endif
|
||||||
|
#endif
|
||||||
|
'''
|
|
@ -0,0 +1,307 @@
|
||||||
|
#
|
||||||
|
# DEPRECATED: implementation for ffi.verify()
|
||||||
|
#
|
||||||
|
import sys, os, binascii, shutil, io
|
||||||
|
from . import __version_verifier_modules__
|
||||||
|
from . import ffiplatform
|
||||||
|
from .error import VerificationError
|
||||||
|
|
||||||
|
# Version-compat shim: list the filename suffixes used by compiled
# C extension modules on this interpreter (e.g. '.so', '.pyd').
if sys.version_info >= (3, 3):
    import importlib.machinery
    def _extension_suffixes():
        # Copy so callers can mutate the result freely.
        return importlib.machinery.EXTENSION_SUFFIXES[:]
else:
    import imp
    def _extension_suffixes():
        # Older interpreters: filter imp.get_suffixes() to C extensions.
        return [suffix for suffix, _, type in imp.get_suffixes()
                if type == imp.C_EXTENSION]
|
||||||
|
|
||||||
|
|
||||||
|
# In-memory file holding the interpreter's "native" string type:
# text (StringIO) on Python 3, bytes (BytesIO) on Python 2, where
# unicode arguments are coerced to ASCII bytes.
if sys.version_info >= (3,):
    NativeIO = io.StringIO
else:
    class NativeIO(io.BytesIO):
        def write(self, s):
            if isinstance(s, unicode):
                s = s.encode('ascii')
            super(NativeIO, self).write(s)
|
||||||
|
|
||||||
|
|
||||||
|
class Verifier(object):
    """Driver for the deprecated ffi.verify() pipeline.

    Generates a C source file from the cdef'd declarations plus the user
    preamble, compiles it into an extension module, and loads it.  Code
    generation and loading are delegated to an engine object chosen by
    _locate_engine_class(); this class manages file naming, on-disk
    caching, and the write/compile/load life cycle under self.ffi._lock.
    """

    def __init__(self, ffi, preamble, tmpdir=None, modulename=None,
                 ext_package=None, tag='', force_generic_engine=False,
                 source_extension='.c', flags=None, relative_to=None, **kwds):
        if ffi._parser._uses_new_feature:
            raise VerificationError(
                "feature not supported with ffi.verify(), but only "
                "with ffi.set_source(): %s" % (ffi._parser._uses_new_feature,))
        self.ffi = ffi
        self.preamble = preamble
        if not modulename:
            flattened_kwds = ffiplatform.flatten(kwds)
        vengine_class = _locate_engine_class(ffi, force_generic_engine)
        self._vengine = vengine_class(self)
        self._vengine.patch_extension_kwds(kwds)
        self.flags = flags
        self.kwds = self.make_relative_to(kwds, relative_to)
        #
        if modulename:
            if tag:
                raise TypeError("can't specify both 'modulename' and 'tag'")
        else:
            # Derive the module name from a pair of CRCs over everything
            # that influences the generated code, so a cached build is
            # only reused when nothing relevant changed.
            key = '\x00'.join(['%d.%d' % sys.version_info[:2],
                               __version_verifier_modules__,
                               preamble, flattened_kwds] +
                              ffi._cdefsources)
            if sys.version_info >= (3,):
                key = key.encode('utf-8')
            k1 = hex(binascii.crc32(key[0::2]) & 0xffffffff)
            k1 = k1.lstrip('0x').rstrip('L')
            k2 = hex(binascii.crc32(key[1::2]) & 0xffffffff)
            # NOTE(review): k2 strips only '0' while k1 strips '0x' --
            # kept as-is, since changing it would change cached names.
            k2 = k2.lstrip('0').rstrip('L')
            modulename = '_cffi_%s_%s%s%s' % (tag, self._vengine._class_key,
                                              k1, k2)
        suffix = _get_so_suffixes()[0]
        self.tmpdir = tmpdir or _caller_dir_pycache()
        self.sourcefilename = os.path.join(self.tmpdir, modulename + source_extension)
        self.modulefilename = os.path.join(self.tmpdir, modulename + suffix)
        self.ext_package = ext_package
        # Life-cycle flags: set once the .c file / the compiled module exist.
        self._has_source = False
        self._has_module = False

    def write_source(self, file=None):
        """Write the C source code.  It is produced in 'self.sourcefilename',
        which can be tweaked beforehand."""
        with self.ffi._lock:
            if self._has_source and file is None:
                raise VerificationError(
                    "source code already written")
            self._write_source(file)

    def compile_module(self):
        """Write the C source code (if not done already) and compile it.
        This produces a dynamic link library in 'self.modulefilename'."""
        with self.ffi._lock:
            if self._has_module:
                raise VerificationError("module already compiled")
            if not self._has_source:
                self._write_source()
            self._compile_module()

    def load_library(self):
        """Get a C module from this Verifier instance.
        Returns an instance of a FFILibrary class that behaves like the
        objects returned by ffi.dlopen(), but that delegates all
        operations to the C module.  If necessary, the C code is written
        and compiled first.
        """
        with self.ffi._lock:
            if not self._has_module:
                self._locate_module()
                if not self._has_module:
                    if not self._has_source:
                        self._write_source()
                    self._compile_module()
            return self._load_library()

    def get_module_name(self):
        """Return the bare module name derived from 'self.modulefilename'."""
        basename = os.path.basename(self.modulefilename)
        # kill both the .so extension and the other .'s, as introduced
        # by Python 3: 'basename.cpython-33m.so'
        basename = basename.split('.', 1)[0]
        # and the _d added in Python 2 debug builds --- but try to be
        # conservative and not kill a legitimate _d
        if basename.endswith('_d') and hasattr(sys, 'gettotalrefcount'):
            basename = basename[:-2]
        return basename

    def get_extension(self):
        """Return the extension object built by ffiplatform.get_extension()
        for the generated source, writing the source first if needed."""
        ffiplatform._hack_at_distutils()  # backward compatibility hack
        if not self._has_source:
            with self.ffi._lock:
                if not self._has_source:
                    self._write_source()
        sourcename = ffiplatform.maybe_relative_path(self.sourcefilename)
        modname = self.get_module_name()
        return ffiplatform.get_extension(sourcename, modname, **self.kwds)

    def generates_python_module(self):
        """Whether the selected engine generates a companion Python module
        (an engine-defined flag)."""
        return self._vengine._gen_python_module

    def make_relative_to(self, kwds, relative_to):
        """Return a copy of 'kwds' where every file-name list (per
        ffiplatform.LIST_OF_FILE_NAMES) is rebased onto the directory of
        'relative_to', if one is given."""
        if relative_to and os.path.dirname(relative_to):
            dirname = os.path.dirname(relative_to)
            kwds = kwds.copy()
            for key in ffiplatform.LIST_OF_FILE_NAMES:
                if key in kwds:
                    lst = kwds[key]
                    if not isinstance(lst, (list, tuple)):
                        raise TypeError("keyword '%s' should be a list or tuple"
                                        % (key,))
                    lst = [os.path.join(dirname, fn) for fn in lst]
                    kwds[key] = lst
        return kwds

    # ----------

    def _locate_module(self):
        # Look for an already-compiled module on disk; if found, adopt its
        # filename and let the engine reload the type information.
        if not os.path.isfile(self.modulefilename):
            if self.ext_package:
                try:
                    pkg = __import__(self.ext_package, None, None, ['__doc__'])
                except ImportError:
                    return      # cannot import the package itself, give up
                    # (e.g. it might be called differently before installation)
                path = pkg.__path__
            else:
                path = None
            filename = self._vengine.find_module(self.get_module_name(), path,
                                                 _get_so_suffixes())
            if filename is None:
                return
            self.modulefilename = filename
        self._vengine.collect_types()
        self._has_module = True

    def _write_source_to(self, file):
        # The engine writes through self._vengine._f for the duration of
        # the call; always remove the attribute afterwards.
        self._vengine._f = file
        try:
            self._vengine.write_source_to_f()
        finally:
            del self._vengine._f

    def _write_source(self, file=None):
        if file is not None:
            self._write_source_to(file)
        else:
            # Write our source file to an in memory file.
            f = NativeIO()
            self._write_source_to(f)
            source_data = f.getvalue()

            # Determine if this matches the current file
            if os.path.exists(self.sourcefilename):
                with open(self.sourcefilename, "r") as fp:
                    needs_written = not (fp.read() == source_data)
            else:
                needs_written = True

            # Actually write the file out if it doesn't match
            if needs_written:
                _ensure_dir(self.sourcefilename)
                with open(self.sourcefilename, "w") as fp:
                    fp.write(source_data)

            # Set this flag
            self._has_source = True

    def _compile_module(self):
        # compile this C source
        tmpdir = os.path.dirname(self.sourcefilename)
        outputfilename = ffiplatform.compile(tmpdir, self.get_extension())
        try:
            same = ffiplatform.samefile(outputfilename, self.modulefilename)
        except OSError:
            same = False
        if not same:
            _ensure_dir(self.modulefilename)
            shutil.move(outputfilename, self.modulefilename)
        self._has_module = True

    def _load_library(self):
        assert self._has_module
        if self.flags is not None:
            return self._vengine.load_library(self.flags)
        else:
            return self._vengine.load_library()
|
||||||
|
|
||||||
|
# ____________________________________________________________
|
||||||
|
|
||||||
|
_FORCE_GENERIC_ENGINE = False      # for tests: makes _locate_engine_class()
                                   # always pick the generic engine
|
||||||
|
|
||||||
|
def _locate_engine_class(ffi, force_generic_engine):
    """Select the verification engine class for *ffi*.

    The CPython-specific engine is used only when running on CPython with
    the compiled _cffi_backend actually backing *ffi*; in every other
    situation (PyPy, pure-Python backend, test override) the generic
    engine is chosen.
    """
    generic = force_generic_engine or _FORCE_GENERIC_ENGINE
    if not generic:
        if '__pypy__' in sys.builtin_module_names:
            generic = True
        else:
            try:
                import _cffi_backend
            except ImportError:
                _cffi_backend = '?'   # sentinel: can never be ffi._backend
            generic = ffi._backend is not _cffi_backend
    if generic:
        from . import vengine_gen
        return vengine_gen.VGenericEngine
    from . import vengine_cpy
    return vengine_cpy.VCPythonEngine
|
||||||
|
|
||||||
|
# ____________________________________________________________
|
||||||
|
|
||||||
|
_TMPDIR = None    # set by set_tmpdir(); consulted first by _caller_dir_pycache()
|
||||||
|
|
||||||
|
def _caller_dir_pycache():
    """Pick the directory for generated files: the explicit set_tmpdir()
    value, then the CFFI_TMPDIR environment variable, then a __pycache__
    directory next to the source file of the caller's caller."""
    if _TMPDIR:
        return _TMPDIR
    result = os.environ.get('CFFI_TMPDIR')
    if result:
        return result
    # NOTE: _getframe(2) hard-codes exactly two call levels between the
    # user's code and here -- do not wrap this function in another one.
    filename = sys._getframe(2).f_code.co_filename
    return os.path.abspath(os.path.join(os.path.dirname(filename),
                                        '__pycache__'))
|
||||||
|
|
||||||
|
def set_tmpdir(dirname):
    """Set the temporary directory to use instead of __pycache__."""
    # Stored in the module-level _TMPDIR, read by _caller_dir_pycache().
    global _TMPDIR
    _TMPDIR = dirname
|
||||||
|
|
||||||
|
def cleanup_tmpdir(tmpdir=None, keep_so=False):
    """Clean up the temporary directory by removing all files in it
    called `_cffi_*.{c,so}` as well as the `build` subdirectory.

    :param tmpdir: directory to clean; defaults to the caller's
        __pycache__ directory (via _caller_dir_pycache()).
    :param keep_so: if true, only the generated `.c` files are removed
        and the compiled extension modules are left in place.

    Best-effort: every OSError during listing or deletion is ignored.
    """
    tmpdir = tmpdir or _caller_dir_pycache()
    try:
        filelist = os.listdir(tmpdir)
    except OSError:
        return
    if keep_so:
        suffix = '.c'   # only remove .c files
    else:
        suffix = _get_so_suffixes()[0].lower()
    for fn in filelist:
        if fn.lower().startswith('_cffi_') and (
                fn.lower().endswith(suffix) or fn.lower().endswith('.c')):
            try:
                os.unlink(os.path.join(tmpdir, fn))
            except OSError:
                pass
    # Breadth-first removal of everything under tmpdir/build; directories
    # discovered on the way are appended and processed in turn.
    clean_dir = [os.path.join(tmpdir, 'build')]
    for dir_path in clean_dir:   # renamed from 'dir': don't shadow the builtin
        try:
            for fn in os.listdir(dir_path):
                fn = os.path.join(dir_path, fn)
                if os.path.isdir(fn):
                    clean_dir.append(fn)
                else:
                    os.unlink(fn)
        except OSError:
            pass
|
||||||
|
|
||||||
|
def _get_so_suffixes():
    """Return the (non-empty) list of C-extension filename suffixes."""
    suffixes = _extension_suffixes()
    if suffixes:
        return suffixes
    # bah, no C_EXTENSION available.  Occurs on pypy without cpyext:
    # fall back to the platform's conventional suffix.
    return [".pyd"] if sys.platform == 'win32' else [".so"]
|
||||||
|
|
||||||
|
def _ensure_dir(filename):
    """Create the directory that will contain *filename*, if there is a
    directory component and it does not exist yet.  Safe to call
    concurrently from several processes."""
    dirname = os.path.dirname(filename)
    if dirname and not os.path.isdir(dirname):
        try:
            os.makedirs(dirname)
        except OSError:
            # Another process may have created the directory between the
            # isdir() check and makedirs() (common when several builds
            # share one cache dir); that is fine.  Re-raise real errors.
            if not os.path.isdir(dirname):
                raise
|
|
@ -0,0 +1,68 @@
|
||||||
|
# This file is dual licensed under the terms of the Apache License, Version
|
||||||
|
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||||
|
# for complete details.
|
||||||
|
|
||||||
|
|
||||||
|
import typing
|
||||||
|
|
||||||
|
from cryptography import utils
|
||||||
|
|
||||||
|
if typing.TYPE_CHECKING:
|
||||||
|
from cryptography.hazmat.bindings.openssl.binding import (
|
||||||
|
_OpenSSLErrorWithText,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class _Reasons(utils.Enum):
    # Machine-readable cause attached to UnsupportedAlgorithm (stored in
    # its _reason attribute): lets callers distinguish *why* something is
    # unavailable without parsing the exception message.
    BACKEND_MISSING_INTERFACE = 0
    UNSUPPORTED_HASH = 1
    UNSUPPORTED_CIPHER = 2
    UNSUPPORTED_PADDING = 3
    UNSUPPORTED_MGF = 4
    UNSUPPORTED_PUBLIC_KEY_ALGORITHM = 5
    UNSUPPORTED_ELLIPTIC_CURVE = 6
    UNSUPPORTED_SERIALIZATION = 7
    UNSUPPORTED_X509 = 8
    UNSUPPORTED_EXCHANGE_ALGORITHM = 9
    UNSUPPORTED_DIFFIE_HELLMAN = 10
    UNSUPPORTED_MAC = 11
|
||||||
|
|
||||||
|
|
||||||
|
class UnsupportedAlgorithm(Exception):
    """Raised when an algorithm or feature is unavailable; carries an
    optional machine-readable _Reasons value in ``_reason``."""

    def __init__(
        self, message: str, reason: typing.Optional[_Reasons] = None
    ) -> None:
        super().__init__(message)
        self._reason = reason
|
||||||
|
|
||||||
|
|
||||||
|
class AlreadyFinalized(Exception):
    """Signals that an object was used again after being finalized
    (exact semantics are defined by the raising call sites)."""
    pass
|
||||||
|
|
||||||
|
|
||||||
|
class AlreadyUpdated(Exception):
    """Signals that an object was updated more than once where only a
    single update is permitted (semantics defined by call sites)."""
    pass
|
||||||
|
|
||||||
|
|
||||||
|
class NotYetFinalized(Exception):
    """Signals that an operation requires the object to be finalized
    first (semantics defined by call sites)."""
    pass
|
||||||
|
|
||||||
|
|
||||||
|
class InvalidTag(Exception):
    """Signals that an authentication tag failed to verify
    (semantics defined by call sites)."""
    pass
|
||||||
|
|
||||||
|
|
||||||
|
class InvalidSignature(Exception):
    """Signals that a signature or MAC check failed; e.g. raised by
    HMAC.verify() and caught by Fernet to produce InvalidToken."""
    pass
|
||||||
|
|
||||||
|
|
||||||
|
class InternalError(Exception):
    """Unexpected internal failure; keeps the collected OpenSSL error
    queue entries in ``err_code`` for diagnosis."""

    def __init__(
        self, msg: str, err_code: typing.List["_OpenSSLErrorWithText"]
    ) -> None:
        super().__init__(msg)
        self.err_code = err_code
|
||||||
|
|
||||||
|
|
||||||
|
class InvalidKey(Exception):
    """Signals that a key failed validation or verification
    (semantics defined by call sites)."""
    pass
|
|
@ -0,0 +1,212 @@
|
||||||
|
# This file is dual licensed under the terms of the Apache License, Version
|
||||||
|
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||||
|
# for complete details.
|
||||||
|
|
||||||
|
|
||||||
|
import base64
|
||||||
|
import binascii
|
||||||
|
import os
|
||||||
|
import time
|
||||||
|
import typing
|
||||||
|
|
||||||
|
from cryptography import utils
|
||||||
|
from cryptography.exceptions import InvalidSignature
|
||||||
|
from cryptography.hazmat.primitives import hashes, padding
|
||||||
|
from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes
|
||||||
|
from cryptography.hazmat.primitives.hmac import HMAC
|
||||||
|
|
||||||
|
|
||||||
|
class InvalidToken(Exception):
    """Raised whenever a Fernet token fails to decode, authenticate,
    decrypt, or satisfy its time-to-live constraints."""
    pass
|
||||||
|
|
||||||
|
|
||||||
|
# Seconds a token's timestamp may lie in the future before it is rejected
# (timestamps come from int(time.time()), i.e. whole seconds).
_MAX_CLOCK_SKEW = 60
|
||||||
|
|
||||||
|
|
||||||
|
class Fernet:
    """Symmetric authenticated encryption in the Fernet token format.

    A token is, before url-safe base64 encoding: a 0x80 version byte,
    an 8-byte big-endian timestamp, a 16-byte IV, the AES-128-CBC /
    PKCS7-padded ciphertext, and a trailing 32-byte HMAC-SHA256 tag
    computed over everything that precedes it.
    """

    def __init__(
        self,
        key: typing.Union[bytes, str],
        backend: typing.Any = None,
    ):
        # NOTE(review): 'backend' is never read in this class; presumably
        # accepted for backward compatibility -- confirm before removing.
        try:
            key = base64.urlsafe_b64decode(key)
        except binascii.Error as exc:
            raise ValueError(
                "Fernet key must be 32 url-safe base64-encoded bytes."
            ) from exc
        if len(key) != 32:
            raise ValueError(
                "Fernet key must be 32 url-safe base64-encoded bytes."
            )

        # First 16 bytes authenticate (HMAC), last 16 encrypt (AES-128).
        self._signing_key = key[:16]
        self._encryption_key = key[16:]

    @classmethod
    def generate_key(cls) -> bytes:
        """Return a fresh random 32-byte key, url-safe base64-encoded."""
        return base64.urlsafe_b64encode(os.urandom(32))

    def encrypt(self, data: bytes) -> bytes:
        """Encrypt *data*, stamping the token with the current time."""
        return self.encrypt_at_time(data, int(time.time()))

    def encrypt_at_time(self, data: bytes, current_time: int) -> bytes:
        """Encrypt *data* with an explicit timestamp (seconds)."""
        iv = os.urandom(16)
        return self._encrypt_from_parts(data, current_time, iv)

    def _encrypt_from_parts(
        self, data: bytes, current_time: int, iv: bytes
    ) -> bytes:
        # Deterministic given (data, current_time, iv): pad, encrypt,
        # then MAC the whole header + ciphertext.
        utils._check_bytes("data", data)

        padder = padding.PKCS7(algorithms.AES.block_size).padder()
        padded_data = padder.update(data) + padder.finalize()
        encryptor = Cipher(
            algorithms.AES(self._encryption_key),
            modes.CBC(iv),
        ).encryptor()
        ciphertext = encryptor.update(padded_data) + encryptor.finalize()

        basic_parts = (
            b"\x80"
            + current_time.to_bytes(length=8, byteorder="big")
            + iv
            + ciphertext
        )

        h = HMAC(self._signing_key, hashes.SHA256())
        h.update(basic_parts)
        hmac = h.finalize()
        return base64.urlsafe_b64encode(basic_parts + hmac)

    def decrypt(self, token: bytes, ttl: typing.Optional[int] = None) -> bytes:
        """Decrypt *token*; if *ttl* (seconds) is given, reject tokens
        older than that.  Raises InvalidToken on any failure."""
        timestamp, data = Fernet._get_unverified_token_data(token)
        if ttl is None:
            time_info = None
        else:
            time_info = (ttl, int(time.time()))
        return self._decrypt_data(data, timestamp, time_info)

    def decrypt_at_time(
        self, token: bytes, ttl: int, current_time: int
    ) -> bytes:
        """Like decrypt() but with an explicit *current_time*; *ttl*
        must not be None here."""
        if ttl is None:
            raise ValueError(
                "decrypt_at_time() can only be used with a non-None ttl"
            )
        timestamp, data = Fernet._get_unverified_token_data(token)
        return self._decrypt_data(data, timestamp, (ttl, current_time))

    def extract_timestamp(self, token: bytes) -> int:
        """Return the timestamp embedded in *token* after checking its HMAC."""
        timestamp, data = Fernet._get_unverified_token_data(token)
        # Verify the token was not tampered with.
        self._verify_signature(data)
        return timestamp

    @staticmethod
    def _get_unverified_token_data(token: bytes) -> typing.Tuple[int, bytes]:
        # Decode and structurally validate the token WITHOUT checking the
        # HMAC; callers must still run _verify_signature() on the result.
        utils._check_bytes("token", token)
        try:
            data = base64.urlsafe_b64decode(token)
        except (TypeError, binascii.Error):
            raise InvalidToken

        if not data or data[0] != 0x80:
            raise InvalidToken

        if len(data) < 9:
            raise InvalidToken

        timestamp = int.from_bytes(data[1:9], byteorder="big")
        return timestamp, data

    def _verify_signature(self, data: bytes) -> None:
        # Check the trailing 32-byte tag with HMAC.verify(); any failure
        # is reported uniformly as InvalidToken.
        h = HMAC(self._signing_key, hashes.SHA256())
        h.update(data[:-32])
        try:
            h.verify(data[-32:])
        except InvalidSignature:
            raise InvalidToken

    def _decrypt_data(
        self,
        data: bytes,
        timestamp: int,
        time_info: typing.Optional[typing.Tuple[int, int]],
    ) -> bytes:
        if time_info is not None:
            ttl, current_time = time_info
            # Token older than its time-to-live?
            if timestamp + ttl < current_time:
                raise InvalidToken

            # Token stamped too far in the future (beyond allowed skew)?
            if current_time + _MAX_CLOCK_SKEW < timestamp:
                raise InvalidToken

        # Authenticate before touching the ciphertext.
        self._verify_signature(data)

        # Layout: [0]=version, [1:9]=timestamp, [9:25]=IV, [25:-32]=ct.
        iv = data[9:25]
        ciphertext = data[25:-32]
        decryptor = Cipher(
            algorithms.AES(self._encryption_key), modes.CBC(iv)
        ).decryptor()
        plaintext_padded = decryptor.update(ciphertext)
        try:
            plaintext_padded += decryptor.finalize()
        except ValueError:
            raise InvalidToken
        unpadder = padding.PKCS7(algorithms.AES.block_size).unpadder()

        unpadded = unpadder.update(plaintext_padded)
        try:
            unpadded += unpadder.finalize()
        except ValueError:
            raise InvalidToken
        return unpadded
|
||||||
|
|
||||||
|
|
||||||
|
class MultiFernet:
    """Key-rotation wrapper around several Fernet instances: encrypts
    with the first one, decrypts by trying each in order."""

    def __init__(self, fernets: typing.Iterable[Fernet]):
        fernets = list(fernets)
        if not fernets:
            raise ValueError(
                "MultiFernet requires at least one Fernet instance"
            )
        self._fernets = fernets

    def encrypt(self, msg: bytes) -> bytes:
        """Encrypt with the primary (first) key at the current time."""
        return self.encrypt_at_time(msg, int(time.time()))

    def encrypt_at_time(self, msg: bytes, current_time: int) -> bytes:
        """Encrypt with the primary key and an explicit timestamp."""
        return self._fernets[0].encrypt_at_time(msg, current_time)

    def rotate(self, msg: bytes) -> bytes:
        """Re-encrypt *msg* under the primary key, preserving its original
        timestamp.  Raises InvalidToken if no key can decrypt it."""
        timestamp, data = Fernet._get_unverified_token_data(msg)
        for f in self._fernets:
            try:
                p = f._decrypt_data(data, timestamp, None)
                break
            except InvalidToken:
                pass
        else:
            # No key in the ring matched.
            raise InvalidToken

        iv = os.urandom(16)
        return self._fernets[0]._encrypt_from_parts(p, timestamp, iv)

    def decrypt(self, msg: bytes, ttl: typing.Optional[int] = None) -> bytes:
        """Try each key in order; the first successful decryption wins."""
        for f in self._fernets:
            try:
                return f.decrypt(msg, ttl)
            except InvalidToken:
                pass
        raise InvalidToken

    def decrypt_at_time(
        self, msg: bytes, ttl: int, current_time: int
    ) -> bytes:
        """Like decrypt() but with an explicit *current_time*; *ttl* is
        mandatory (enforced by Fernet.decrypt_at_time)."""
        for f in self._fernets:
            try:
                return f.decrypt_at_time(msg, ttl, current_time)
            except InvalidToken:
                pass
        raise InvalidToken
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue