OpenCores
URL https://opencores.org/ocsvn/openrisc/openrisc/trunk

Subversion Repositories openrisc

Compare Revisions

  • This comparison shows the changes necessary to convert path
    /openrisc/trunk
    from Rev 753 to Rev 754

Rev 753 → Rev 754

/gnu-dev/or1k-gcc/libjava/contrib/rebuild-gcj-db.in
0,0 → 1,27
#!/bin/bash
# rebuild-gcj-db
 
## Copyright (C) 2000, 2002, 2003, 2008 Free Software Foundation
##
## This file is part of libgcj.
##
## This software is copyrighted work licensed under the terms of the
## Libgcj License. Please consult the file "LIBGCJ_LICENSE" for
## details.
 
# Rebuild all the standard databases.
for i in `@prefix@/bin/gcc --print-multi-lib 2>/dev/null`; do
  multilib=`echo $i | sed -e 's/^.*;//' | sed -e 's/\@/ -/g'`
  dirname=`@prefix@/bin/gcc -print-multi-os-directory $multilib 2>/dev/null`
  base=@prefix@/lib/$dirname
  dbLocation=`@prefix@/bin/gcj-dbtool -p $base`
  libdir=$base/gcj
  if ! test -d $libdir; then
    # No shared libraries here.
    continue
  fi
  dirname $dbLocation | xargs mkdir -p
  @prefix@/bin/gcj-dbtool -n $dbLocation 64
  find $libdir -follow -name '*.db' -print0 | \
    @prefix@/bin/gcj-dbtool -0 -m $dbLocation $dbLocation
done
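
The sed pipeline above turns gcc's --print-multi-lib output (lines such as ".;" and "64;@m64") into extra gcc flags. A minimal Python sketch of the same transformation, with an illustrative input line:

# Rough equivalent of the sed pipeline in rebuild-gcj-db (illustrative only):
# everything after the ";" is a "@"-separated list of option fragments.
def multilib_flags(line):
    opts = line.split(";", 1)[1]
    return opts.replace("@", " -").strip()   # "@m64" -> "-m64"

print multilib_flags("64;@m64")   # prints "-m64"
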
/gnu-dev/or1k-gcc/libjava/contrib/aotcompile.py.in
0,0 → 1,424
# -*- python -*-
 
## Copyright (C) 2005, 2006, 2008 Free Software Foundation
## Written by Gary Benson <gbenson@redhat.com>
##
## This program is free software; you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation; either version 2 of the License, or
## (at your option) any later version.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
 
import classfile
import copy
# The md5 module is deprecated in Python 2.5
try:
    from hashlib import md5
except ImportError:
    from md5 import md5
import operator
import os
import sys
import cStringIO as StringIO
import zipfile
 
PATHS = {"make": "@MAKE@",
"gcj": "@prefix@/bin/gcj@gcc_suffix@",
"dbtool": "@prefix@/bin/gcj-dbtool@gcc_suffix@"}
 
MAKEFLAGS = []
GCJFLAGS = ["-fPIC", "-findirect-dispatch", "-fjni"]
LDFLAGS = ["-Wl,-Bsymbolic"]
 
MAX_CLASSES_PER_JAR = 1024
MAX_BYTES_PER_JAR = 1048576
 
MAKEFILE = "Makefile"
 
MAKEFILE_HEADER = '''\
GCJ = %(gcj)s
DBTOOL = %(dbtool)s
GCJFLAGS = %(gcjflags)s
LDFLAGS = %(ldflags)s

%%.o: %%.jar
	$(GCJ) -c $(GCJFLAGS) $< -o $@

TARGETS = \\
%(targets)s

all: $(TARGETS)'''

MAKEFILE_JOB = '''
%(base)s_SOURCES = \\
%(jars)s

%(base)s_OBJECTS = \\
$(%(base)s_SOURCES:.jar=.o)

%(dso)s: $(%(base)s_OBJECTS)
	$(GCJ) -shared $(GCJFLAGS) $(LDFLAGS) $^ -o $@

%(db)s: $(%(base)s_SOURCES)
	$(DBTOOL) -n $@ 64
	for jar in $^; do \\
	    $(DBTOOL) -f $@ $$jar \\
	        %(libdir)s/%(dso)s; \\
	done'''
 
ZIPMAGIC, CLASSMAGIC = "PK\x03\x04", "\xca\xfe\xba\xbe"
 
class Error(Exception):
    pass

class Compiler:
    def __init__(self, srcdir, libdir, prefix = None):
        self.srcdir = os.path.abspath(srcdir)
        self.libdir = os.path.abspath(libdir)
        if prefix is None:
            self.dstdir = self.libdir
        else:
            self.dstdir = os.path.join(prefix, self.libdir.lstrip(os.sep))

        # Calling code may modify these parameters
        self.gcjflags = copy.copy(GCJFLAGS)
        self.ldflags = copy.copy(LDFLAGS)
        self.makeflags = copy.copy(MAKEFLAGS)
        self.exclusions = []

    def compile(self):
        """Search srcdir for classes and jarfiles, then generate
        solibs and mappings databases for them all in libdir."""
        if not os.path.isdir(self.dstdir):
            os.makedirs(self.dstdir)
        oldcwd = os.getcwd()
        os.chdir(self.dstdir)
        try:
            jobs = self.getJobList()
            if not jobs:
                raise Error, "nothing to do"
            self.writeMakefile(MAKEFILE, jobs)
            for job in jobs:
                job.writeJars()
            system([PATHS["make"]] + self.makeflags)
            for job in jobs:
                job.clean()
            os.unlink(MAKEFILE)
        finally:
            os.chdir(oldcwd)

    def getJobList(self):
        """Return all jarfiles and class collections in srcdir."""
        jobs = weed_jobs(find_jobs(self.srcdir, self.exclusions))
        set_basenames(jobs)
        return jobs

    def writeMakefile(self, path, jobs):
        """Generate a makefile to build the solibs and mappings
        databases for the specified list of jobs."""
        fp = open(path, "w")
        print >>fp, MAKEFILE_HEADER % {
            "gcj": PATHS["gcj"],
            "dbtool": PATHS["dbtool"],
            "gcjflags": " ".join(self.gcjflags),
            "ldflags": " ".join(self.ldflags),
            "targets": " \\\n".join(reduce(operator.add, [
                (job.dsoName(), job.dbName()) for job in jobs]))}
        for job in jobs:
            values = job.ruleArguments()
            values["libdir"] = self.libdir
            print >>fp, MAKEFILE_JOB % values
        fp.close()
 
def find_jobs(dir, exclusions = ()):
    """Scan a directory and find things to compile: jarfiles (zips,
    wars, ears, rars, etc: we go by magic rather than file extension)
    and directories of classes."""
    def visit((classes, zips), dir, items):
        for item in items:
            path = os.path.join(dir, item)
            if os.path.islink(path) or not os.path.isfile(path):
                continue
            magic = open(path, "r").read(4)
            if magic == ZIPMAGIC:
                zips.append(path)
            elif magic == CLASSMAGIC:
                classes.append(path)
    classes, paths = [], []
    os.path.walk(dir, visit, (classes, paths))
    # Convert the list of classes into a list of directories
    while classes:
        # XXX this requires the class to be correctly located in its hierarchy.
        path = classes[0][:-len(os.sep + classname(classes[0]) + ".class")]
        paths.append(path)
        classes = [cls for cls in classes if not cls.startswith(path)]
    # Handle exclusions.  We're really strict about them because the
    # option is temporary in aot-compile-rpm and dead options left in
    # specfiles will hinder its removal.
    for path in exclusions:
        if path in paths:
            paths.remove(path)
        else:
            raise Error, "%s: path does not exist or is not a job" % path
    # Build the list of jobs
    jobs = []
    paths.sort()
    for path in paths:
        if os.path.isfile(path):
            job = JarJob(path)
        else:
            job = DirJob(path)
        if len(job.classes):
            jobs.append(job)
    return jobs
 
class Job:
    """A collection of classes that will be compiled as a unit."""
    def __init__(self, path):
        self.path, self.classes, self.blocks = path, {}, None
        self.classnames = {}

    def addClass(self, bytes, name):
        """Subclasses call this from their __init__ method for
        every class they find."""
        digest = md5(bytes).digest()
        self.classes[digest] = bytes
        self.classnames[digest] = name

    def __makeBlocks(self):
        """Split self.classes into chunks that can be compiled to
        native code by gcj.  In the majority of cases this is not
        necessary -- the job will have come from a jarfile which will
        be equivalent to the one we generate -- but this only happens
        _if_ the job was a jarfile and _if_ the jarfile isn't too big
        and _if_ the jarfile has the correct extension and _if_ all
        classes are correctly named and _if_ the jarfile has no
        embedded jarfiles.  Fitting a special case around all these
        conditions is tricky to say the least.

        Note that this could be called at the end of each subclass's
        __init__ method.  The reason this is not done is because we
        need to parse every class file.  This is slow, and unnecessary
        if the job is subsetted."""
        names = {}
        for hash, bytes in self.classes.items():
            try:
                name = classname(bytes)
            except:
                warn("job %s: class %s malformed or not a valid class file" \
                     % (self.path, self.classnames[hash]))
                raise
            if not names.has_key(name):
                names[name] = []
            names[name].append(hash)
        names = names.items()
        # We have to sort somehow, or the jars we generate would
        # depend on dictionary ordering.  We sort by name in a
        # simplistic attempt to keep related classes together so
        # inter-class optimisation can happen.
        names.sort()
        self.blocks, bytes = [[]], 0
        for name, hashes in names:
            for hash in hashes:
                if len(self.blocks[-1]) >= MAX_CLASSES_PER_JAR \
                   or bytes >= MAX_BYTES_PER_JAR:
                    self.blocks.append([])
                    bytes = 0
                self.blocks[-1].append((name, hash))
                bytes += len(self.classes[hash])

    # From Archit Shah:
    #   The implementation and the documentation don't seem to match.
    #
    #     [a, b].isSubsetOf([a]) => True
    #
    #   Identical copies of all classes in this collection do not
    #   exist in the other.  I think the method should be named
    #   isSupersetOf and the documentation should swap its uses of
    #   "this" and "other".
    #
    # XXX think about this when I've had more sleep...
    def isSubsetOf(self, other):
        """Returns True if identical copies of all classes in this
        collection exist in the other."""
        for item in other.classes.keys():
            if not self.classes.has_key(item):
                return False
        return True

    def __targetName(self, ext):
        return self.basename + ext

    def tempJarName(self, num):
        return self.__targetName(".%d.jar" % (num + 1))

    def tempObjName(self, num):
        return self.__targetName(".%d.o" % (num + 1))

    def dsoName(self):
        """Return the filename of the shared library that will be
        built from this job."""
        return self.__targetName(".so")

    def dbName(self):
        """Return the filename of the mapping database that will be
        built from this job."""
        return self.__targetName(".db")

    def ruleArguments(self):
        """Return a dictionary of values that when substituted
        into MAKEFILE_JOB will create the rules required to build
        the shared library and mapping database for this job."""
        if self.blocks is None:
            self.__makeBlocks()
        return {
            "base": "".join(
                [c.isalnum() and c or "_" for c in self.dsoName()]),
            "jars": " \\\n".join(
                [self.tempJarName(i) for i in xrange(len(self.blocks))]),
            "dso": self.dsoName(),
            "db": self.dbName()}

    def writeJars(self):
        """Generate jarfiles that can be native compiled by gcj."""
        if self.blocks is None:
            self.__makeBlocks()
        for block, i in zip(self.blocks, xrange(len(self.blocks))):
            jar = zipfile.ZipFile(self.tempJarName(i), "w", zipfile.ZIP_STORED)
            for name, hash in block:
                jar.writestr(
                    zipfile.ZipInfo("%s.class" % name), self.classes[hash])
            jar.close()

    def clean(self):
        """Delete all temporary files created during this job's build."""
        if self.blocks is None:
            self.__makeBlocks()
        for i in xrange(len(self.blocks)):
            os.unlink(self.tempJarName(i))
            os.unlink(self.tempObjName(i))
 
class JarJob(Job):
    """A Job whose origin was a jarfile."""

    def __init__(self, path):
        Job.__init__(self, path)
        self._walk(zipfile.ZipFile(path, "r"))

    def _walk(self, zf):
        for name in zf.namelist():
            bytes = zf.read(name)
            if bytes.startswith(ZIPMAGIC):
                self._walk(zipfile.ZipFile(StringIO.StringIO(bytes)))
            elif bytes.startswith(CLASSMAGIC):
                self.addClass(bytes, name)

class DirJob(Job):
    """A Job whose origin was a directory of classfiles."""

    def __init__(self, path):
        Job.__init__(self, path)
        os.path.walk(path, DirJob._visit, self)

    def _visit(self, dir, items):
        for item in items:
            path = os.path.join(dir, item)
            if os.path.islink(path) or not os.path.isfile(path):
                continue
            fp = open(path, "r")
            magic = fp.read(4)
            if magic == CLASSMAGIC:
                # Record the file's path as this class's name.
                self.addClass(magic + fp.read(), path)

def weed_jobs(jobs):
    """Remove any jarfiles that are completely contained within
    another.  This is more common than you'd think, and we only
    need one nativified copy of each class after all."""
    jobs = copy.copy(jobs)
    while True:
        for job1 in jobs:
            for job2 in jobs:
                if job1 is job2:
                    continue
                if job1.isSubsetOf(job2):
                    msg = "subsetted %s" % job2.path
                    if job2.isSubsetOf(job1):
                        if (isinstance(job1, DirJob) and
                            isinstance(job2, JarJob)):
                            # In the braindead case where a package
                            # contains an expanded copy of a jarfile
                            # the jarfile takes precedence.
                            continue
                        msg += " (identical)"
                    warn(msg)
                    jobs.remove(job2)
                    break
            else:
                continue
            break
        else:
            break
        continue
    return jobs
 
def set_basenames(jobs):
    """Ensure that each jarfile has a different basename."""
    names = {}
    for job in jobs:
        name = os.path.basename(job.path)
        if not names.has_key(name):
            names[name] = []
        names[name].append(job)
    for name, set in names.items():
        if len(set) == 1:
            set[0].basename = name
            continue
        # prefix the jar filenames to make them unique
        # XXX will not work in most cases -- needs generalising
        set = [(job.path.split(os.sep), job) for job in set]
        minlen = min([len(bits) for bits, job in set])
        set = [(bits[-minlen:], job) for bits, job in set]
        bits = apply(zip, [bits for bits, job in set])
        while True:
            row = bits[-2]
            for bit in row[1:]:
                if bit != row[0]:
                    break
            else:
                del bits[-2]
                continue
            break
        set = zip(
            ["_".join(name) for name in apply(zip, bits[-2:])],
            [job for bits, job in set])
        for name, job in set:
            warn("building %s as %s" % (job.path, name))
            job.basename = name
    # XXX keep this check until we're properly general
    names = {}
    for job in jobs:
        name = job.basename
        if names.has_key(name):
            raise Error, "%s: duplicate jobname" % name
        names[name] = 1
 
def system(command):
    """Execute a command."""
    status = os.spawnv(os.P_WAIT, command[0], command)
    if status > 0:
        raise Error, "%s exited with code %d" % (command[0], status)
    elif status < 0:
        raise Error, "%s killed by signal %d" % (command[0], -status)

def warn(msg):
    """Print a warning message."""
    print >>sys.stderr, "%s: warning: %s" % (
        os.path.basename(sys.argv[0]), msg)

def classname(bytes):
    """Extract the class name from the bytes of a class file."""
    klass = classfile.Class(bytes)
    return klass.constants[klass.constants[klass.name][1]][1]
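
A minimal sketch of driving the module above, assuming hypothetical source and destination directories: callers construct a Compiler, optionally extend its flag and exclusion lists, then call compile().

import aotcompile

compiler = aotcompile.Compiler("/tmp/build-root", "/usr/lib/gcj/mypkg")
compiler.gcjflags.append("-g")                              # flag lists may be extended
compiler.exclusions.append("/tmp/build-root/skip-me.jar")   # skip one jarfile
compiler.compile()   # writes .so and .db pairs into the destination
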
/gnu-dev/or1k-gcc/libjava/contrib/aot-compile-rpm.in
0,0 → 1,97
#!/usr/bin/env python
 
## Copyright (C) 2005, 2006, 2007, 2011 Free Software Foundation
## Written by Gary Benson <gbenson@redhat.com>
##
## This program is free software; you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation; either version 2 of the License, or
## (at your option) any later version.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
 
import sys
sys.path.insert(0, "@python_mod_dir_expanded@")
import aotcompile
import os
 
def libdir():
    cmd = "%s -p" % aotcompile.PATHS["dbtool"]
    dir = os.path.abspath(os.popen(cmd, "r").readline().rstrip())
    dir, base = os.path.split(dir)
    if base != "classmap.db":
        raise aotcompile.Error, "%s: unexpected output" % cmd
    dir, base = os.path.split(dir)
    if not base.startswith("gcj-"):
        raise aotcompile.Error, "%s: unexpected output" % cmd
    return dir

def writeSourceList(srcdir, dstpath):
    def visit(fp, dir, items):
        for item in items:
            path = os.path.join(dir, item)
            if os.path.isfile(path):
                print >>fp, path
    dstdir = os.path.dirname(dstpath)
    if not os.path.isdir(dstdir):
        os.makedirs(dstdir)
    os.path.walk(srcdir, visit, open(dstpath, "w"))

def copy(srcdir, dstdir, suffix):
    srcdir = os.path.join(srcdir, suffix.lstrip(os.sep))
    dstdir = os.path.join(dstdir, suffix.lstrip(os.sep))
    os.makedirs(os.path.dirname(dstdir))
    aotcompile.system(("/bin/cp", "-a", srcdir, dstdir))
 
try:
    name = os.environ.get("RPM_PACKAGE_NAME")
    if name is None:
        raise aotcompile.Error, "not for use outside rpm specfiles"
    arch = os.environ.get("RPM_ARCH")
    if arch == "noarch":
        raise aotcompile.Error, "cannot be used on noarch packages"
    srcdir = os.environ.get("RPM_BUILD_ROOT")
    if srcdir in (None, "/"):
        raise aotcompile.Error, "bad $RPM_BUILD_ROOT"
    tmpdir = os.path.join(os.getcwd(), "aot-compile-rpm")
    if os.path.exists(tmpdir):
        raise aotcompile.Error, "%s exists" % tmpdir
    dstdir = os.path.join(libdir(), "gcj", name)

    compiler = aotcompile.Compiler(srcdir, dstdir, tmpdir)
    compiler.gcjflags[0:0] = os.environ.get("RPM_OPT_FLAGS", "").split()

    # XXX: This script should not accept options, because if it takes
    # options it cannot be integrated into rpm.  But gcj cannot build
    # each and every jarfile yet, so we must be able to exclude until
    # it can.
    # XXX --exclude is also used in the jonas rpm to stop everything
    # being made a subset of the mammoth client jarfile.  Should
    # adjust the subset checker's bias to favour many small jarfiles
    # over one big one.
    try:
        options, exclusions = sys.argv[1:], []
        while options:
            if options.pop(0) != "--exclude":
                raise ValueError
            compiler.exclusions.append(
                os.path.join(srcdir, options.pop(0).lstrip(os.sep)))
    except:
        print >>sys.stderr, "usage: %s [--exclude PATH]..." % (
            os.path.basename(sys.argv[0]))
        sys.exit(1)

    sourcelist = os.path.join(tmpdir, "sources.list")
    writeSourceList(os.getcwd(), sourcelist)
    compiler.gcjflags.append("-fsource-filename=" + sourcelist)

    compiler.compile()
    copy(tmpdir, srcdir, dstdir)

except aotcompile.Error, e:
    print >>sys.stderr, "%s: error: %s" % (
        os.path.basename(sys.argv[0]), e)
    sys.exit(1)
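
For reference, libdir() above recovers the install libdir by peeling two path components off the database path reported by gcj-dbtool. A small sketch with a hypothetical path:

import os

path = "/usr/lib/gcj-4.5/classmap.db"   # hypothetical `gcj-dbtool -p` output
dir, base = os.path.split(path)         # "/usr/lib/gcj-4.5", "classmap.db"
assert base == "classmap.db"
dir, base = os.path.split(dir)          # "/usr/lib", "gcj-4.5"
assert base.startswith("gcj-")
print dir                               # prints "/usr/lib"
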
/gnu-dev/or1k-gcc/libjava/contrib/generate-cacerts.pl.in
0,0 → 1,106
#!/usr/bin/perl
 
# Copyright (C) 2007, 2009 Free Software Foundation
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
 
# generate-cacerts.pl generates a gkeytool keystore named 'cacerts'
# from OpenSSL's certificate bundle.
 
# First extract each of OpenSSL's bundled certificates into its own
# aliased filename.
chomp($file = $ARGV[0]);
$file = "/etc/pki/tls/cert.pem" unless $file ne "";
open(CERTS, $file);
@certs = <CERTS>;
close(CERTS);
 
$pem_file_number = 0;
$writing_cert = 0;
foreach $cert (@certs)
{
    if ($cert eq "-----BEGIN CERTIFICATE-----\n")
    {
        if ($writing_cert != 0)
        {
            die "$file is malformed.";
        }
        $pem_file_number++;
        # Numbering each file guarantees that cert aliases will be
        # unique.
        $pem_file_name = "$pem_file_number$cert_alias.pem";
        $writing_cert = 1;
        open(PEM, ">$pem_file_name");
        print PEM $cert;
    }
    elsif ($cert eq "-----END CERTIFICATE-----\n")
    {
        $writing_cert = 0;
        print PEM $cert;
        close(PEM);
    }
    elsif ($cert =~ /Issuer: /)
    {
        # Generate an alias using the OU and CN attributes of the
        # Issuer field if both are present, otherwise use only the CN
        # attribute.  The Issuer field must have either the OU or the
        # CN attribute.
        $_ = $cert;
        if ($cert =~ /OU=/)
        {
            s/Issuer:.*?OU=//;
            # Remove other occurrences of OU=.
            s/OU=.*CN=//;
            # Remove CN= if there were no other occurrences of OU=.
            s/CN=//;
        }
        elsif ($cert =~ /CN=/)
        {
            s/Issuer:.*CN=//;
        }
        s/\W//g;
        tr/A-Z/a-z/;
        $cert_alias = $_;
    }
    else
    {
        if ($writing_cert == 1)
        {
            print PEM $cert;
        }
    }
}
 
# Check that the correct number of .pem files were produced.
@pem_files = <*.pem>;
if (@pem_files != $pem_file_number)
{
    die "Number of .pem files produced does not match".
        " number of certs read from $file.";
}

# Now store each cert in the 'cacerts' file using gkeytool.
$certs_written_count = 0;
foreach $pem_file (@pem_files)
{
    system "yes | gkeytool@gcc_suffix@ -import -alias `basename $pem_file .pem`".
           " -keystore cacerts -storepass '' -file $pem_file".
           " 2>&1 >/dev/null";
    unlink($pem_file);
    $certs_written_count++;
}

# Check that the correct number of certs were added to the keystore.
if ($certs_written_count != $pem_file_number)
{
    die "Number of certs added to keystore does not match".
        " number of certs read from $file.";
}
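
The alias derivation in the Issuer branch above can be illustrated with a rough Python re equivalent (the Issuer line is a made-up example; the script itself does this with Perl substitutions):

import re

def cert_alias(issuer_line):
    # Prefer the OU attribute, fall back to CN, then strip non-word
    # characters and lowercase, mirroring the substitutions above.
    s = issuer_line
    if re.search(r"OU=", s):
        s = re.sub(r"Issuer:.*?OU=", "", s)
        s = re.sub(r"OU=.*CN=", "", s)
        s = re.sub(r"CN=", "", s)
    elif re.search(r"CN=", s):
        s = re.sub(r"Issuer:.*CN=", "", s)
    return re.sub(r"\W", "", s).lower()

print cert_alias("Issuer: C=US, OU=Example Trust Network, CN=Example CA\n")
# prints "exampletrustnetworkexampleca"
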
/gnu-dev/or1k-gcc/libjava/contrib/classfile.py
0,0 → 1,221
## Copyright (C) 2004, 2005 Free Software Foundation
## Written by Gary Benson <gbenson@redhat.com>
##
## This program is free software; you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation; either version 2 of the License, or
## (at your option) any later version.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
 
"""Read Java(TM) class files."""
 
import cStringIO as StringIO
import struct
 
class Class:
    def __init__(self, arg):
        if hasattr(arg, "read"):
            self.fp = arg
        elif type(arg) == type(""):
            if arg.startswith("\xca\xfe\xba\xbe"):
                self.fp = StringIO.StringIO(arg)
            else:
                self.fp = open(arg, "r")
        else:
            raise TypeError, type(arg)

        magic = self._read_int()
        assert magic == 0xcafebabeL
        minor, major = self._read(">HH")
        self.version = (major, minor)

        # The assert below only raises when assertions are enabled,
        # so pool integrity checking is skipped under "python -O".
        self.pool_integrity_checks = None
        try:
            assert False
        except AssertionError:
            self.pool_integrity_checks = []

        self._read_constants_pool()

        self.access_flags = self._read_short()
        self.name = self._read_reference_Class()
        self.super = self._read_reference_Class()

        self.interfaces = self._read_interfaces()
        self.fields = self._read_fieldsormethods()
        self.methods = self._read_fieldsormethods()
        self.attributes = self._read_attributes()

        if self.pool_integrity_checks is not None:
            for index, tag in self.pool_integrity_checks:
                assert self.constants[index][0] == tag

        del self.fp, self.pool_integrity_checks
 
    def __repr__(self):
        result = []
        attrs = [attr for attr in dir(self)
                 if not attr.startswith("_") and attr != "Member"]
        attrs.sort()
        for attr in attrs:
            result.append("%-13s %s" % (
                attr + ":", attr == "constants" and
                "<ELIDED>" or repr(getattr(self, attr))))
        return "\n".join(result)

    def _read_constants_pool(self):
        self.constants = {}
        skip = False
        for i in xrange(1, self._read_short()):
            if skip:
                skip = False
                continue
            tag = {
                1: "Utf8", 3: "Integer", 4: "Float", 5: "Long",
                6: "Double", 7: "Class", 8: "String", 9: "Fieldref",
                10: "Methodref", 11: "InterfaceMethodref",
                12: "NameAndType"}[self._read_byte()]
            # Long and Double constants occupy two pool slots.
            skip = tag in ("Long", "Double") # crack crack crack!
            self.constants[i] = (tag, getattr(self, "_read_constant_" + tag)())
 
    def _read_interfaces(self):
        result = []
        for i in xrange(self._read_short()):
            result.append(self._read_reference_Class())
        return result

    def _read_fieldsormethods(self):
        result = []
        for i in xrange(self._read_short()):
            result.append(self.Member(self))
        return result

    class Member:
        def __init__(self, source):
            self.access_flags = source._read_short()
            self.name = source._read_reference_Utf8()
            self.descriptor = source._read_reference_Utf8()
            self.attributes = source._read_attributes()

        def __repr__(self):
            result = []
            attrs = [attr for attr in dir(self) if not attr.startswith("_")]
            attrs.sort()
            for attr in attrs:
                value = getattr(self, attr)
                if attr == "attributes" and value.has_key("Code"):
                    value = value.copy()
                    value.update({"Code": "<ELIDED>"})
                result.append("%-13s %s" % (
                    attr + ":", repr(value).replace(
                        "'Code': '<ELIDED>'", "'Code': <ELIDED>")))
            return ("\n%s" % (15 * " ")).join(result)

    def _read_attributes(self):
        result = {}
        for i in xrange(self._read_short()):
            name = self._read_reference_Utf8()
            data = self.fp.read(self._read_int())
            assert not result.has_key(name)
            result[name] = data
        return result
 
    # Constants pool reference reader convenience functions

    def _read_reference_Utf8(self):
        return self._read_references("Utf8")[0]

    def _read_reference_Class(self):
        return self._read_references("Class")[0]

    def _read_reference_Class_NameAndType(self):
        return self._read_references("Class", "NameAndType")

    def _read_references(self, *args):
        result = []
        for arg in args:
            index = self._read_short()
            if self.pool_integrity_checks is not None:
                self.pool_integrity_checks.append((index, arg))
            result.append(index)
        return result

    # Constants pool constant reader functions

    def _read_constant_Utf8(self):
        constant = self.fp.read(self._read_short())
        try:
            constant = constant.decode("utf-8")
        except UnicodeError:
            constant = _bork_utf8_decode(constant)
        try:
            constant = constant.encode("us-ascii")
        except UnicodeError:
            pass
        return constant

    def _read_constant_Integer(self):
        return self._read_int()

    def _read_constant_Float(self):
        return self._read(">f")[0]

    def _read_constant_Long(self):
        return self._read(">q")[0]

    def _read_constant_Double(self):
        return self._read(">d")[0]

    _read_constant_Class = _read_reference_Utf8
    _read_constant_String = _read_reference_Utf8
    _read_constant_Fieldref = _read_reference_Class_NameAndType
    _read_constant_Methodref = _read_reference_Class_NameAndType
    _read_constant_InterfaceMethodref = _read_reference_Class_NameAndType

    def _read_constant_NameAndType(self):
        return self._read_reference_Utf8(), self._read_reference_Utf8()

    # Generic reader functions

    def _read_int(self):
        # XXX how else to read 32 bits on a 64-bit box?
        h, l = map(long, self._read(">HH"))
        return (h << 16) + l

    def _read_short(self):
        return self._read(">H")[0]

    def _read_byte(self):
        return self._read("B")[0]

    def _read(self, fmt):
        return struct.unpack(fmt, self.fp.read(struct.calcsize(fmt)))
 
def _bork_utf8_decode(data):
    # more crack!
    bytes, unicode = map(ord, data), ""
    while bytes:
        b1 = bytes.pop(0)
        if b1 & 0x80:
            assert b1 & 0x40
            b2 = bytes.pop(0)
            assert b2 & 0xC0 == 0x80
            if b1 & 0x20:
                assert not b1 & 0x10
                b3 = bytes.pop(0)
                assert b3 & 0xC0 == 0x80
                unicode += unichr(
                    ((b1 & 0x0f) << 12) + ((b2 & 0x3f) << 6) + (b3 & 0x3f))
            else:
                unicode += unichr(((b1 & 0x1f) << 6) + (b2 & 0x3f))
        else:
            unicode += unichr(b1)
    return unicode

if __name__ == "__main__":
    print Class("/usr/share/katana/build/ListDependentClasses.class")
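
A minimal sketch of using this module, assuming a hypothetical classfile on disk; the double pool lookup is the same one aotcompile.classname() performs:

import classfile

klass = classfile.Class("Foo.class")          # accepts a path, bytes or file object
tag, utf8_index = klass.constants[klass.name] # klass.name indexes a CONSTANT_Class entry
assert tag == "Class"
print klass.constants[utf8_index][1]          # e.g. "com/example/Foo"
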
 
/gnu-dev/or1k-gcc/libjava/contrib/aot-compile.in
0,0 → 1,88
#!/usr/bin/env python
 
## Copyright (C) 2006, 2011 Free Software Foundation
## Written by Gary Benson <gbenson@redhat.com>
##
## This program is free software; you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation; either version 2 of the License, or
## (at your option) any later version.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
 
import sys
sys.path.insert(0, "@python_mod_dir_expanded@")
import aotcompile
import getopt
import os
 
usage = """\
Usage: %s [OPTION...] SRCDIR DSTDIR
AOT-compile all Java bytecode in SRCDIR into DSTDIR.

Options:
  -M, --make=PATH        make executable to use (%s)
  -C, --gcj=PATH         gcj executable to use (%s)
  -D, --dbtool=PATH      gcj-dbtool executable to use (%s)
  -m, --makeflags=FLAGS  flags to pass to make during build
  -c, --gcjflags=FLAGS   flags to pass to gcj during compilation
                         in addition to %s
  -l, --ldflags=FLAGS    flags to pass to gcj during linking
                         in addition to %s
  -e, --exclude=PATH     do not compile PATH

Extra flags may also be passed using the AOT_MAKEFLAGS, AOT_GCJFLAGS
and AOT_LDFLAGS environment variables.""" % (
    os.path.basename(sys.argv[0]),
    aotcompile.PATHS["make"],
    aotcompile.PATHS["gcj"],
    aotcompile.PATHS["dbtool"],
    repr(" ".join(aotcompile.GCJFLAGS)),
    repr(" ".join(aotcompile.LDFLAGS)))
 
try:
    if os.environ.has_key("RPM_PACKAGE_NAME"):
        raise aotcompile.Error, "not for use within rpm specfiles"

    try:
        opts, args = getopt.getopt(
            sys.argv[1:],
            "M:C:D:m:c:l:e:",
            ["make=", "gcj=", "dbtool=",
             "makeflags=", "gcjflags=", "ldflags=",
             "exclude="])
        srcdir, dstdir = args
    except:
        print >>sys.stderr, usage
        sys.exit(1)

    compiler = aotcompile.Compiler(srcdir, dstdir)
    for o, a in opts:
        if o in ("-M", "--make"):
            aotcompile.PATHS["make"] = a
        if o in ("-C", "--gcj"):
            aotcompile.PATHS["gcj"] = a
        if o in ("-D", "--dbtool"):
            aotcompile.PATHS["dbtool"] = a
        if o in ("-m", "--makeflags"):
            compiler.makeflags[0:0] = a.split()
        if o in ("-c", "--gcjflags"):
            compiler.gcjflags[0:0] = a.split()
        if o in ("-l", "--ldflags"):
            compiler.ldflags[0:0] = a.split()
        if o in ("-e", "--exclude"):
            compiler.exclusions.append(a)
    compiler.makeflags[0:0] = os.environ.get("AOT_MAKEFLAGS", "").split()
    compiler.gcjflags[0:0] = os.environ.get("AOT_GCJFLAGS", "").split()
    compiler.ldflags[0:0] = os.environ.get("AOT_LDFLAGS", "").split()

    compiler.compile()

except aotcompile.Error, e:
    print >>sys.stderr, "%s: error: %s" % (
        os.path.basename(sys.argv[0]), e)
    sys.exit(1)
