diff --git a/.github/workflows/submit.yml b/.github/workflows/submit.yml
index 83a8c98a30d..b0a4e7b952e 100644
--- a/.github/workflows/submit.yml
+++ b/.github/workflows/submit.yml
@@ -277,7 +277,7 @@ jobs:
run: >
if cat test-results/testoutput/*/exitcode.txt | grep -q -v '^0$'
|| ! cat test-results/testoutput/*/Stats.txt | grep -q 'fail=0' ; then
- cat test-results/testoutput/*/JTreport/text/newfailures.txt ;
+ cat test-results/testoutput/*/JTreport/text/{newfailures,other_errors}.txt ;
exit 1 ;
fi
@@ -638,7 +638,7 @@ jobs:
run: >
if cat test-results/testoutput/*/exitcode.txt | grep -q -v '^0$'
|| ! cat test-results/testoutput/*/Stats.txt | grep -q 'fail=0' ; then
- cat test-results/testoutput/*/JTreport/text/newfailures.txt ;
+ cat test-results/testoutput/*/JTreport/text/{newfailures,other_errors}.txt ;
exit 1 ;
fi
@@ -1143,6 +1143,7 @@ jobs:
run: >
if ((Get-ChildItem -Path test-results\testoutput\*\exitcode.txt -Recurse | Select-String -Pattern '^0$' -NotMatch ).Count -gt 0) {
Get-Content -Path test-results\testoutput\*\JTreport\text\newfailures.txt ;
+ Get-Content -Path test-results\testoutput\*\JTreport\text\other_errors.txt ;
exit 1
}
@@ -1299,6 +1300,7 @@ jobs:
run: >
if ((Get-ChildItem -Path test-results\testoutput\*\exitcode.txt -Recurse | Select-String -Pattern '^0$' -NotMatch ).Count -gt 0) {
Get-Content -Path test-results\testoutput\*\JTreport\text\newfailures.txt ;
+ Get-Content -Path test-results\testoutput\*\JTreport\text\other_errors.txt ;
exit 1
}
@@ -1529,7 +1531,7 @@ jobs:
run: >
if cat test-results/testoutput/*/exitcode.txt | grep -q -v '^0$'
|| ! cat test-results/testoutput/*/Stats.txt | grep -q 'fail=0' ; then
- cat test-results/testoutput/*/JTreport/text/newfailures.txt ;
+ cat test-results/testoutput/*/JTreport/text/{newfailures,other_errors}.txt ;
exit 1 ;
fi
diff --git a/.jcheck/conf b/.jcheck/conf
index 6b38c2b0c86..a8d23a3c35b 100644
--- a/.jcheck/conf
+++ b/.jcheck/conf
@@ -1,7 +1,7 @@
[general]
project=jdk8u
jbs=JDK
-version=openjdk8u392
+version=openjdk8u402
[checks]
error=author,committer,reviewers,merge,issues,executable,symlink,message,hg-tag,whitespace
diff --git a/README b/README
index 40c9fbc6a77..c93292bfbe4 100644
--- a/README
+++ b/README
@@ -1,40 +1,10 @@
-README:
- This file should be located at the top of the OpenJDK Mercurial root
- repository. A full OpenJDK repository set (forest) should also include
- the following 6 nested repositories:
- "jdk", "hotspot", "langtools", "corba", "jaxws" and "jaxp".
+Welcome to OpenJDK!
+===================
- The root repository can be obtained with something like:
- hg clone http://hg.openjdk.java.net/jdk8/jdk8 openjdk8
-
- You can run the get_source.sh script located in the root repository to get
- the other needed repositories:
- cd openjdk8 && sh ./get_source.sh
+For information about building OpenJDK, including how to fully retrieve all
+source code, please see either of these:
- People unfamiliar with Mercurial should read the first few chapters of
- the Mercurial book: http://hgbook.red-bean.com/read/
+ * doc/building.html (html version)
+ * doc/building.md (markdown version)
- See http://openjdk.java.net/ for more information about OpenJDK.
-
-Simple Build Instructions:
-
- 0. Get the necessary system software/packages installed on your system, see
- http://hg.openjdk.java.net/jdk8/jdk8/raw-file/tip/README-builds.html
-
- 1. If you don't have a jdk7u7 or newer jdk, download and install it from
- http://java.sun.com/javase/downloads/index.jsp
- Add the /bin directory of this installation to your PATH environment
- variable.
-
- 2. Configure the build:
- bash ./configure
-
- 3. Build the OpenJDK:
- make all
- The resulting JDK image should be found in build/*/images/j2sdk-image
-
-where make is GNU make 3.81 or newer, /usr/bin/make on Linux usually
-is 3.81 or newer. Note that on Solaris, GNU make is called "gmake".
-
-Complete details are available in the file:
- http://hg.openjdk.java.net/jdk8/jdk8/raw-file/tip/README-builds.html
+See http://openjdk.java.net/ for more information about OpenJDK.
diff --git a/README-builds.html b/README-builds.html
deleted file mode 100644
index 2281650f9e6..00000000000
--- a/README-builds.html
+++ /dev/null
@@ -1,1389 +0,0 @@
-
-
- OpenJDK Build README
-
-
-
-
-
OpenJDK Build README
-
-
-
-
-
-
Introduction
-
-
This README file contains build instructions for the
-OpenJDK. Building the source code for the OpenJDK
-requires a certain degree of technical expertise.
-
-
!!!!!!!!!!!!!!! THIS IS A MAJOR RE-WRITE of this document. !!!!!!!!!!!!!
-
-
Some Headlines:
-
-
-
The build is now a "configure && make" style build
-
Any GNU make 3.81 or newer should work
-
The build should scale, i.e. more processors should cause the build to be
-done in less wall-clock time
-
Nested or recursive make invocations have been significantly reduced,
-as has the total fork/exec or spawning of sub processes during the build
-
Windows MKS usage is no longer supported
-
Windows Visual Studio vsvars*.bat and vcvars*.bat files are run
-automatically
-
Ant is no longer used when building the OpenJDK
-
Use of ALT_* environment variables for configuring the build is no longer
-supported
The OpenJDK sources are maintained with the revision control system
-Mercurial. If you are new to
-Mercurial, please see the Beginner Guides or refer to the Mercurial Book.
-The first few chapters of the book provide an excellent overview of Mercurial,
-what it is and how it works.
Once you have all the repositories, keep in mind that each repository is its
-own independent repository. You can also re-run ./get_source.sh anytime to
-pull over all the latest changesets in all the repositories. This set of
-nested repositories has been given the term "forest" and there are various
-ways to apply the same hg command to each of the repositories. For
-example, the script make/scripts/hgforest.sh can be used to repeat the
-same hg command on every repository, e.g.
-
-
cd YourOpenJDK
- bash ./make/scripts/hgforest.sh status
-
-
-
-
-
Repositories
-
-
The set of repositories and what they contain:
-
-
-
. (root) contains common configure and makefile logic
-
hotspot contains source code and make files for building the OpenJDK
-Hotspot Virtual Machine
-
langtools contains source code for the OpenJDK javac and language tools
-
jdk contains source code and make files for building the OpenJDK runtime
-libraries and misc files
-
jaxp contains source code for the OpenJDK JAXP functionality
-
jaxws contains source code for the OpenJDK JAX-WS functionality
-
corba contains source code for the OpenJDK Corba functionality
-
nashorn contains source code for the OpenJDK JavaScript implementation
-
-
-
Repository Source Guidelines
-
-
There are some very basic guidelines:
-
-
-
Use of whitespace in source files (.java, .c, .h, .cpp, and .hpp files) is
-restricted. No TABs, no trailing whitespace on lines, and files should not
-terminate in more than one blank line.
-
Files with execute permissions should not be added to the source
-repositories.
-
All generated files need to be kept isolated from the files maintained or
-managed by the source control system. The standard area for generated files
-is the top level build/ directory.
-
The default build process should be to build the product and nothing else,
-in one form, e.g. a product (optimized), debug (non-optimized, -g plus
-assert logic), or fastdebug (optimized, -g plus assert logic).
-
The .hgignore file in each repository must exist and should include
-^build/, ^dist/ and optionally any nbproject/private directories. It
-should NEVER include anything in the src/ or test/ or any managed
-directory area of a repository.
-
Directory names and file names should never contain blanks or non-printing
-characters.
-
Generated source or binary files should NEVER be added to the repository
-(that includes javah output). There are some exceptions to this rule, in
-particular with some of the generated configure scripts.
-
Files not needed for typical building or testing of the repository should
-not be added to the repository.
-
-
-
-
-
-
-
Building
-
-
The very first step in building the OpenJDK is making sure the system itself
-has everything it needs to do OpenJDK builds. Once a system is setup, it
-generally doesn't need to be done again.
-
-
Building the OpenJDK is now done with running a configure script which will
-try and find and verify you have everything you need, followed by running
-make, e.g.
-
-
-
bash ./configure
- make all
-
-
-
Where possible the configure script will attempt to located the various
-components in the default locations or via component specific variable
-settings. When the normal defaults fail or components cannot be found,
-additional configure options may be necessary to help configure find the
-necessary tools for the build, or you may need to re-visit the setup of your
-system due to missing software packages.
-
-
NOTE: The configure script file does not have execute permissions and
-will need to be explicitly run with bash, see the source guidelines.
-
-
-
-
-
-
System Setup
-
-
Before even attempting to use a system to build the OpenJDK there are some very
-basic system setups needed. For all systems:
-
-
-
Be sure the GNU make utility is version 3.81 or newer, e.g.
-run "make -version"
-
-
-
Install a Bootstrap JDK. All OpenJDK builds require access to a previously
-released JDK called the bootstrap JDK or boot JDK. The general rule is
-that the bootstrap JDK must be an instance of the previous major release of
-the JDK. In addition, there may be a requirement to use a release at or
-beyond a particular update level.
-
-
Building JDK 8 requires use of a version of JDK 7 this is at Update 7
-or newer. JDK 8 developers should not use JDK 8 as the boot JDK, to ensure
-that JDK 8 dependencies are not introduced into the parts of the system
-that are built with JDK 7.
-
-
The JDK 7 binaries can be downloaded from Oracle's JDK 7 download
-site.
-For build performance reasons it is very important that this bootstrap JDK
-be made available on the local disk of the machine doing the build. You
-should add its bin directory to the PATH environment variable. If
-configure has any issues finding this JDK, you may need to use the
-configure option --with-boot-jdk.
-
Ensure that GNU make, the Bootstrap JDK, and the compilers are all in your
-PATH environment variable.
Install XCode 4.5.2 and also
-install the "Command line tools" found under the preferences pane
-"Downloads"
-
-
-
-
-
Linux
-
-
With Linux, try and favor the system packages over building your own or getting
-packages from other areas. Most Linux builds should be possible with the
-system's available packages.
-
-
Note that some Linux systems have a habit of pre-populating your environment
-variables for you, for example JAVA_HOME might get pre-defined for you to
-refer to the JDK installed on your Linux system. You will need to unset
-JAVA_HOME. It's a good idea to run env and verify the environment variables
-you are getting from the default system settings make sense for building the
-OpenJDK.
-
-
-
-
Solaris
-
-
-
-
Studio Compilers
-
-
At a minimum, the Studio 12 Update 1 Compilers (containing
-version 5.10 of the C and C++ compilers) is required, including specific
-patches.
-
-
The Solaris SPARC patch list is:
-
-
-
118683-05: SunOS 5.10: Patch for profiling libraries and assembler
-
119963-21: SunOS 5.10: Shared library patch for C++
Building on Windows requires a Unix-like environment, notably a Unix-like
-shell. There are several such environments available of which
-Cygwin and
-MinGW/MSYS are currently supported for the
-OpenJDK build. One of the differences of these systems from standard Windows
-tools is the way they handle Windows path names, particularly path names which
-contain spaces, backslashes as path separators and possibly drive letters.
-Depending on the use case and the specifics of each environment these path
-problems can be solved by a combination of quoting whole paths, translating
-backslashes to forward slashes, escaping backslashes with additional
-backslashes and translating the path names to their "8.3"
-version.
-
-
-
-
CYGWIN
-
-
CYGWIN is an open source, Linux-like environment which tries to emulate a
-complete POSIX layer on Windows. It tries to be smart about path names and can
-usually handle all kinds of paths if they are correctly quoted or escaped
-although internally it maps drive letters <drive>: to a virtual directory
-/cygdrive/<drive>.
-
-
You can always use the cygpath utility to map pathnames with spaces or the
-backslash character into the C:/ style of pathname (called 'mixed'), e.g.
-cygpath -s -m "<path>".
-
-
Note that the use of CYGWIN creates a unique problem with regards to setting
-PATH. Normally on Windows the PATH variable contains directories
-separated with the ";" character (Solaris and Linux use ":"). With CYGWIN, it
-uses ":", but that means that paths like "C:/path" cannot be placed in the
-CYGWIN version of PATH and instead CYGWIN uses something like
-/cygdrive/c/path which CYGWIN understands, but only CYGWIN understands.
-
-
The OpenJDK build requires CYGWIN version 1.7.16 or newer. Information about
-CYGWIN can be obtained from the CYGWIN website at
-www.cygwin.com.
-
-
By default CYGWIN doesn't install all the tools required for building the
-OpenJDK. Along with the default installation, you need to install the following
-tools.
-
-
-
-
-
-
Binary Name
-
Category
-
Package
-
Description
-
-
-
-
-
ar.exe
-
Devel
-
binutils
-
The GNU assembler, linker and binary utilities
-
-
-
make.exe
-
Devel
-
make
-
The GNU version of the 'make' utility built for CYGWIN
-
-
-
m4.exe
-
Interpreters
-
m4
-
GNU implementation of the traditional Unix macro processor
-
-
-
cpio.exe
-
Utils
-
cpio
-
A program to manage archives of files
-
-
-
gawk.exe
-
Utils
-
awk
-
Pattern-directed scanning and processing language
-
-
-
file.exe
-
Utils
-
file
-
Determines file type using 'magic' numbers
-
-
-
zip.exe
-
Archive
-
zip
-
Package and compress (archive) files
-
-
-
unzip.exe
-
Archive
-
unzip
-
Extract compressed files in a ZIP archive
-
-
-
free.exe
-
System
-
procps
-
Display amount of free and used memory in the system
-
-
-
-
-
-
Note that the CYGWIN software can conflict with other non-CYGWIN software on
-your Windows system. CYGWIN provides a FAQ for known issues and problems, of particular interest is the
-section on BLODA (applications that interfere with
-CYGWIN).
-
-
-
-
MinGW/MSYS
-
-
MinGW ("Minimalist GNU for Windows") is a collection of free Windows specific
-header files and import libraries combined with GNU toolsets that allow one to
-produce native Windows programs that do not rely on any 3rd-party C runtime
-DLLs. MSYS is a supplement to MinGW which allows building applications and
-programs which rely on traditional UNIX tools to be present. Among others this
-includes tools like bash and make. See MinGW/MSYS for more information.
-
-
Like Cygwin, MinGW/MSYS can handle different types of path formats. They are
-internally converted to paths with forward slashes and drive letters
-<drive>: replaced by a virtual directory /<drive>. Additionally, MSYS
-automatically detects binaries compiled for the MSYS environment and feeds them
-with the internal, Unix-style path names. If native Windows applications are
-called from within MSYS programs their path arguments are automatically
-converted back to Windows style path names with drive letters and backslashes
-as path separators. This may cause problems for Windows applications which use
-forward slashes as parameter separator (e.g. cl /nologo /I) because MSYS may
-wrongly replace such parameters by drive letters.
-
-
In addition to the tools which will be installed by default, you have to
-manually install the msys-zip and msys-unzip packages. This can be easily
-done with the MinGW command line installer:
The 32-bit and 64-bit OpenJDK Windows build requires Microsoft Visual Studio
-C++ 2010 (VS2010) Professional Edition or Express compiler. The compiler and
-other tools are expected to reside in the location defined by the variable
-VS100COMNTOOLS which is set by the Microsoft Visual Studio installer.
-
-
Only the C++ part of VS2010 is needed. Try to let the installation go to the
-default install directory. Always reboot your system after installing VS2010.
-The system environment variable VS100COMNTOOLS should be set in your
-environment.
-
-
Make sure that TMP and TEMP are also set in the environment and refer to
-Windows paths that exist, like C:\temp, not /tmp, not /cygdrive/c/temp,
-and not C:/temp. C:\temp is just an example, it is assumed that this area
-is private to the user, so by default after installs you should see a unique
-user path in these variables.
-
-
-
-
Mac OS X
-
-
Make sure you get the right XCode version.
-
-
-
-
-
-
Configure
-
-
The basic invocation of the configure script looks like:
-
-
-
bash ./configure [options]
-
-
-
This will create an output directory containing the "configuration" and setup
-an area for the build result. This directory typically looks like:
-
-
-
build/linux-x64-normal-server-release
-
-
-
configure will try to figure out what system you are running on and where all
-necessary build components are. If you have all prerequisites for building
-installed, it should find everything. If it fails to detect any component
-automatically, it will exit and inform you about the problem. When this
-happens, read more below in the configure options.
-
-
Some examples:
-
-
-
Windows 32bit build with freetype specified:
- bash ./configure --with-freetype=/cygdrive/c/freetype-i586 --with-target-
-bits=32
Complete details on all the OpenJDK configure options can be seen with:
-
-
-
bash ./configure --help=short
-
-
-
Use -help to see all the configure options available. You can generate any
-number of different configurations, e.g. debug, release, 32, 64, etc.
-
-
Some of the more commonly used configure options are:
-
-
-
--enable-debug
- set the debug level to fastdebug (this is a shorthand for --with-debug-
- level=fastdebug)
-
-
-
-
-
-
--with-alsa=path
- select the location of the Advanced Linux Sound Architecture (ALSA)
-
-
Version 0.9.1 or newer of the ALSA files are required for building the
- OpenJDK on Linux. These Linux files are usually available from an "alsa" of
- "libasound" development package, and it's highly recommended that you try
- and use the package provided by the particular version of Linux that you are
- using.
--with-boot-jdk-jvmargs="args"
- provide the JVM options to be used to run the Bootstrap JDK
-
-
--with-cacerts=path
- select the path to the cacerts file.
-
-
See Certificate Authority on Wikipedia for a better understanding of the Certificate
- Authority (CA). A certificates file named "cacerts" represents a system-wide
- keystore with CA certificates. In JDK and JRE binary bundles, the "cacerts"
- file contains root CA certificates from several public CAs (e.g., VeriSign,
- Thawte, and Baltimore). The source contain a cacerts file without CA root
- certificates. Formal JDK builders will need to secure permission from each
- public CA and include the certificates into their own custom cacerts file.
- Failure to provide a populated cacerts file will result in verification
- errors of a certificate chain during runtime. By default an empty cacerts
- file is provided and that should be fine for most JDK developers.
-
-
-
-
-
-
--with-cups=path
- select the CUPS install location
-
-
The Common UNIX Printing System (CUPS) Headers are required for building the
- OpenJDK on Solaris and Linux. The Solaris header files can be obtained by
- installing the package SFWcups from the Solaris Software Companion
- CD/DVD, these often will be installed into the directory /opt/sfw/cups.
-
-
The CUPS header files can always be downloaded from
- www.cups.org.
-
-
--with-cups-include=path
- select the CUPS include directory location
-
-
--with-debug-level=level
- select the debug information level of release, fastdebug, or slowdebug
-
-
--with-dev-kit=path
- select location of the compiler install or developer install location
-
-
-
-
-
-
--with-freetype=path
- select the freetype files to use.
-
-
Expecting the freetype libraries under lib/ and the headers under
- include/.
-
-
Version 2.3 or newer of FreeType is required. On Unix systems required files
- can be available as part of your distribution (while you still may need to
- upgrade them). Note that you need development version of package that
- includes both the FreeType library and header files.
Note that by default FreeType is built with byte code hinting support
- disabled due to licensing restrictions. In this case, text appearance and
- metrics are expected to differ from Sun's official JDK build. See the
- SourceForge FreeType2 Home Page
- for more information.
-
-
--with-import-hotspot=path
- select the location to find hotspot binaries from a previous build to avoid
- building hotspot
-
-
--with-target-bits=arg
- select 32 or 64 bit build
-
-
--with-jvm-variants=variants
- select the JVM variants to build from, comma separated list that can
- include: server, client, kernel, zero and zeroshark
-
-
--with-memory-size=size
- select the RAM size that GNU make will think this system has
-
-
--with-msvcr-dll=path
- select the msvcr100.dll file to include in the Windows builds (C/C++
- runtime library for Visual Studio).
-
-
This is usually picked up automatically from the redist directories of
- Visual Studio 2010.
-
-
--with-num-cores=cores
- select the number of cores to use (processor count or CPU count)
-
-
-
-
-
-
--with-x=path
- select the location of the X11 and xrender files.
-
-
The XRender Extension Headers are required for building the OpenJDK on
- Solaris and Linux. The Linux header files are usually available from a
- "Xrender" development package, it's recommended that you try and use the
- package provided by the particular distribution of Linux that you are using.
- The Solaris XRender header files is included with the other X11 header files
- in the package SFWxwinc on new enough versions of Solaris and will be
- installed in /usr/X11/include/X11/extensions/Xrender.h or
- /usr/openwin/share/include/X11/extensions/Xrender.h
-
-
-
-
-
-
-
Make
-
-
The basic invocation of the make utility looks like:
-
-
-
make all
-
-
-
This will start the build to the output directory containing the
-"configuration" that was created by the configure script. Run make help for
-more information on the available targets.
-
-
There are some of the make targets that are of general interest:
-
-
-
empty
- build everything but no images
-
-
all
- build everything including images
-
-
all-conf
- build all configurations
-
-
images
- create complete j2sdk and j2re images
-
-
install
- install the generated images locally, typically in /usr/local
-
-
clean
- remove all files generated by make, but not those generated by configure
-
-
dist-clean
- remove all files generated by both and configure (basically killing the
- configuration)
-
-
help
- give some help on using make, including some interesting make targets
-
-
-
-
-
-
-
Testing
-
-
When the build is completed, you should see the generated binaries and
-associated files in the j2sdk-image directory in the output directory. In
-particular, the build/*/images/j2sdk-image/bin directory should contain
-executables for the OpenJDK tools and utilities for that configuration. The
-testing tool jtreg will be needed and can be found at: the jtreg
-site. The provided regression tests in the
-repositories can be run with the command:
-
-
-
cd test && make PRODUCT_HOME=`pwd`/../build/*/images/j2sdk-image all
-
-
-
-
-
-
-
Appendix A: Hints and Tips
-
-
-
-
FAQ
-
-
Q: The generated-configure.sh file looks horrible! How are you going to
-edit it?
-A: The generated-configure.sh file is generated (think "compiled") by the
-autoconf tools. The source code is in configure.ac and various .m4 files in
-common/autoconf, which are much more readable.
-
-
Q: Why is the generated-configure.sh file checked in, if it is
-generated?
-A: If it was not generated, every user would need to have the autoconf
-tools installed, and re-generate the configure file as the first step. Our
-goal is to minimize the work needed to be done by the user to start building
-OpenJDK, and to minimize the number of external dependencies required.
-
-
Q: Do you require a specific version of autoconf for regenerating
-generated-configure.sh?
-A: Yes, version 2.69 is required and should be easy enough to aquire on all
-supported operating systems. The reason for this is to avoid large spurious
-changes in generated-configure.sh.
-
-
Q: How do you regenerate generated-configure.sh after making changes to
-the input files?
-A: Regnerating generated-configure.sh should always be done using the
-script common/autoconf/autogen.sh to ensure that the correct files get
-updated. This script should also be run after mercurial tries to merge
-generated-configure.sh as a merge of the generated file is not guaranteed to
-be correct.
-
-
Q: What are the files in common/makefiles/support/* for? They look like
-gibberish.
-A: They are a somewhat ugly hack to compensate for command line length
-limitations on certain platforms (Windows, Solaris). Due to a combination of
-limitations in make and the shell, command lines containing too many files will
-not work properly. These helper files are part of an elaborate hack that will
-compress the command line in the makefile and then uncompress it safely. We're
-not proud of it, but it does fix the problem. If you have any better
-suggestions, we're all ears! :-)
-
-
Q: I want to see the output of the commands that make runs, like in the old
-build. How do I do that?
-A: You specify the LOG variable to make. There are several log levels:
-
-
-
warn -- Default and very quiet.
-
info -- Shows more progress information than warn.
-
debug -- Echos all command lines and prints all macro calls for
-compilation definitions.
-
trace -- Echos all $(shell) command lines as well.
-
-
-
Q: When do I have to re-run configure?
-A: Normally you will run configure only once for creating a
-configuration. You need to re-run configuration only if you want to change any
-configuration options, or if you pull down changes to the configure script.
-
-
Q: I have added a new source file. Do I need to modify the makefiles?
-A: Normally, no. If you want to create e.g. a new native library, you will
-need to modify the makefiles. But for normal file additions or removals, no
-changes are needed. There are certan exceptions for some native libraries where
-the source files are spread over many directories which also contain sources
-for other libraries. In these cases it was simply easier to create include
-lists rather than excludes.
-
-
Q: When I run configure --help, I see many strange options, like
---dvidir. What is this?
-A: Configure provides a slew of options by default, to all projects that
-use autoconf. Most of them are not used in OpenJDK, so you can safely ignore
-them. To list only OpenJDK specific features, use configure --help=short
-instead.
-
-
Q:configure provides OpenJDK-specific features such as --with-
-builddeps-server that are not described in this document. What about those?
-A: Try them out if you like! But be aware that most of these are
-experimental features. Many of them don't do anything at all at the moment; the
-option is just a placeholder. Others depend on pieces of code or infrastructure
-that is currently not ready for prime time.
-
-
Q: How will you make sure you don't break anything?
-A: We have a script that compares the result of the new build system with
-the result of the old. For most part, we aim for (and achieve) byte-by-byte
-identical output. There are however technical issues with e.g. native binaries,
-which might differ in a byte-by-byte comparison, even when building twice with
-the old build system. For these, we compare relevant aspects (e.g. the symbol
-table and file size). Note that we still don't have 100% equivalence, but we're
-close.
-
-
Q: I noticed this thing X in the build that looks very broken by design.
-Why don't you fix it?
-A: Our goal is to produce a build output that is as close as technically
-possible to the old build output. If things were weird in the old build, they
-will be weird in the new build. Often, things were weird before due to
-obscurity, but in the new build system the weird stuff comes up to the surface.
-The plan is to attack these things at a later stage, after the new build system
-is established.
-
-
Q: The code in the new build system is not that well-structured. Will you
-fix this?
-A: Yes! The new build system has grown bit by bit as we converted the old
-system. When all of the old build system is converted, we can take a step back
-and clean up the structure of the new build system. Some of this we plan to do
-before replacing the old build system and some will need to wait until after.
-
-
Q: Is anything able to use the results of the new build's default make
-target?
-A: Yes, this is the minimal (or roughly minimal) set of compiled output
-needed for a developer to actually execute the newly built JDK. The idea is
-that in an incremental development fashion, when doing a normal make, you
-should only spend time recompiling what's changed (making it purely
-incremental) and only do the work that's needed to actually run and test your
-code. The packaging stuff that is part of the images target is not needed for
-a normal developer who wants to test his new code. Even if it's quite fast,
-it's still unnecessary. We're targeting sub-second incremental rebuilds! ;-)
-(Or, well, at least single-digit seconds...)
-
-
Q: I usually set a specific environment variable when building, but I can't
-find the equivalent in the new build. What should I do?
-A: It might very well be that we have neglected to add support for an
-option that was actually used from outside the build system. Email us and we
-will add support for it!
-
-
-
-
Build Performance Tips
-
-
Building OpenJDK requires a lot of horsepower. Some of the build tools can be
-adjusted to utilize more or less of resources such as parallel threads and
-memory. The configure script analyzes your system and selects reasonable
-values for such options based on your hardware. If you encounter resource
-problems, such as out of memory conditions, you can modify the detected values
-with:
-
-
-
--with-num-cores -- number of cores in the build system, e.g.
---with-num-cores=8
-
--with-memory-size -- memory (in MB) available in the build system,
-e.g. --with-memory-size=1024
-
-
-
It might also be necessary to specify the JVM arguments passed to the Bootstrap
-JDK, using e.g. --with-boot-jdk-jvmargs="-Xmx8G -enableassertions". Doing
-this will override the default JVM arguments passed to the Bootstrap JDK.
-
-
One of the top goals of the new build system is to improve the build
-performance and decrease the time needed to build. This will soon also apply to
-the java compilation when the Smart Javac wrapper is making its way into jdk8.
-It can be tried in the build-infra repository already. You are likely to find
-that the new build system is faster than the old one even without this feature.
-
-
At the end of a successful execution of configure, you will get a performance
-summary, indicating how well the build will perform. Here you will also get
-performance hints. If you want to build fast, pay attention to those!
-
-
Building with ccache
-
-
A simple way to radically speed up compilation of native code
-(typically hotspot and native libraries in JDK) is to install
-ccache. This will cache and reuse prior compilation results, if the
-source code is unchanged. However, ccache versions prior to 3.1.4 does
-not work correctly with the precompiled headers used in OpenJDK. So if
-your platform supports ccache at 3.1.4 or later, we highly recommend
-installing it. This is currently only supported on linux.
-
-
Building on local disk
-
-
If you are using network shares, e.g. via NFS, for your source code, make sure
-the build directory is situated on local disk. The performance penalty is
-extremely high for building on a network share, close to unusable.
-
-
Building only one JVM
-
-
The old build builds multiple JVMs on 32-bit systems (client and server; and on
-Windows kernel as well). In the new build we have changed this default to only
-build server when it's available. This improves build times for those not
-interested in multiple JVMs. To mimic the old behavior on platforms that
-support it, use --with-jvm-variants=client,server.
-
-
Selecting the number of cores to build on
-
-
By default, configure will analyze your machine and run the make process in
-parallel with as many threads as you have cores. This behavior can be
-overridden, either "permanently" (on a configure basis) using
---with-num-cores=N or for a single build only (on a make basis), using
-make JOBS=N.
-
-
If you want to make a slower build just this time, to save some CPU power for
-other processes, you can run e.g. make JOBS=2. This will force the makefiles
-to only run 2 parallel processes, or even make JOBS=1 which will disable
-parallelism.
-
-
If you want to have it the other way round, namely having slow builds default
-and override with fast if you're impatient, you should call configure with
---with-num-cores=2, making 2 the default. If you want to run with more cores,
-run make JOBS=8
-
-
-
-
Troubleshooting
-
-
Solving build problems
-
-
If the build fails (and it's not due to a compilation error in a source file
-you've changed), the first thing you should do is to re-run the build with more
-verbosity. Do this by adding LOG=debug to your make command line.
-
-
The build log (with both stdout and stderr intermingled, basically the same as
-you see on your console) can be found as build.log in your build directory.
-
-
You can ask for help on build problems with the new build system on either the
-build-dev or the
-build-infra-dev
-mailing lists. Please include the relevant parts of the build log.
-
-
A build can fail for any number of reasons. Most failures are a result of
-trying to build in an environment in which all the pre-build requirements have
-not been met. The first step in troubleshooting a build failure is to recheck
-that you have satisfied all the pre-build requirements for your platform.
-Scanning the configure log is a good first step, making sure that what it
-found makes sense for your system. Look for strange error messages or any
-difficulties that configure had in finding things.
-
-
Some of the more common problems with builds are briefly described below, with
-suggestions for remedies.
-
-
-
Corrupted Bundles on Windows:
-Some virus scanning software has been known to corrupt the downloading of
-zip bundles. It may be necessary to disable the 'on access' or 'real time'
-virus scanning features to prevent this corruption. This type of 'real time'
-virus scanning can also slow down the build process significantly.
-Temporarily disabling the feature, or excluding the build output directory
-may be necessary to get correct and faster builds.
-
Slow Builds:
-If your build machine seems to be overloaded from too many simultaneous C++
-compiles, try setting the JOBS=1 on the make command line. Then try
-increasing the count slowly to an acceptable level for your system. Also:
-
-
Creating the javadocs can be very slow, if you are running javadoc, consider
-skipping that step.
-
-
Faster CPUs, more RAM, and a faster DISK usually helps. The VM build tends
-to be CPU intensive (many C++ compiles), and the rest of the JDK will often
-be disk intensive.
-
-
Faster compiles are possible using a tool called
-ccache.
-
File time issues:
-If you see warnings that refer to file time stamps, e.g.
-
-
-
Warning message:File 'xxx' has modification time in the future.
-Warning message:Clock skew detected. Your build may be incomplete.
-
-
-
These warnings can occur when the clock on the build machine is out of sync
-with the timestamps on the source files. Other errors, apparently unrelated
-but in fact caused by the clock skew, can occur along with the clock skew
-warnings. These secondary errors may tend to obscure the fact that the true
-root cause of the problem is an out-of-sync clock.
-
-
If you see these warnings, reset the clock on the build machine, run
-"gmake clobber" or delete the directory containing the build output, and
-restart the build from the beginning.
-
Error message: Trouble writing out table to disk
-Increase the amount of swap space on your build machine. This could be
-caused by overloading the system and it may be necessary to use:
-
-
-
make JOBS=1
-
-
-
to reduce the load on the system.
-
Error Message: libstdc++ not found:
-This is caused by a missing libstdc++.a library. This is installed as part
-of a specific package (e.g. libstdc++.so.devel.386). By default some 64-bit
-Linux versions (e.g. Fedora) only install the 64-bit version of the
-libstdc++ package. Various parts of the JDK build require a static link of
-the C++ runtime libraries to allow for maximum portability of the built
-images.
-
Linux Error Message: cannot restore segment prot after reloc
-This is probably an issue with SELinux (See SELinux on
-Wikipedia). Parts of the VM is built
-without the -fPIC for performance reasons.
-
-
To completely disable SELinux:
-
-
-
$ su root
-
# system-config-securitylevel
-
In the window that appears, select the SELinux tab
-
Disable SELinux
-
-
-
Alternatively, instead of completely disabling it you could disable just
-this one check.
-
-
-
Select System->Administration->SELinux Management
-
In the SELinux Management Tool which appears, select "Boolean" from the
-menu on the left
-
Expand the "Memory Protection" group
-
Check the first item, labeled "Allow all unconfined executables to use
-libraries requiring text relocation ..."
-
-
Windows Error Messages:
-*** fatal error - couldn't allocate heap, ...
-rm fails with "Directory not empty"
-unzip fails with "cannot create ... Permission denied"
-unzip fails with "cannot create ... Error 50"
Windows Error Message: spawn failed
-Try rebooting the system, or there could be some kind of issue with the disk
-or disk partition being used. Sometimes it comes with a "Permission Denied"
-message.
-
-
-
-
-
-
-
Appendix B: GNU make
-
-
The Makefiles in the OpenJDK are only valid when used with the GNU version of
-the utility command make (usually called gmake on Solaris). A few notes
-about using GNU make:
-
-
-
You need GNU make version 3.81 or newer. If the GNU make utility on your
-systems is not 3.81 or newer, see "Building GNU make".
-
Place the location of the GNU make binary in the PATH.
-
Solaris: Do NOT use /usr/bin/make on Solaris. If your Solaris system
-has the software from the Solaris Developer Companion CD installed, you
-should try and use gmake which will be located in either the /usr/bin,
-/opt/sfw/bin or /usr/sfw/bin directory.
-
Windows: Make sure you start your build inside a bash shell.
-
Mac OS X: The XCode "command line tools" must be installed on your Mac.
-
-
-
Information on GNU make, and access to ftp download sites, are available on the
-GNU make web site . The latest
-source to GNU make is available at
-ftp.gnu.org/pub/gnu/make/.
-
-
-
-
Building GNU make
-
-
First step is to get the GNU make 3.81 or newer source from
-ftp.gnu.org/pub/gnu/make/. Building is a
-little different depending on the OS but is basically done with:
-
-
bash ./configure
- make
-
-
-
-
-
-
-
Appendix C: Build Environments
-
-
Minimum Build Environments
-
-
This file often describes specific requirements for what we call the "minimum
-build environments" (MBE) for this specific release of the JDK. What is listed
-below is what the Oracle Release Engineering Team will use to build the Oracle
-JDK product. Building with the MBE will hopefully generate the most compatible
-bits that install on, and run correctly on, the most variations of the same
-base OS and hardware architecture. In some cases, these represent what is often
-called the least common denominator, but each Operating System has different
-aspects to it.
-
-
In all cases, the Bootstrap JDK version minimum is critical, we cannot
-guarantee builds will work with older Bootstrap JDK's. Also in all cases, more
-RAM and more processors is better, the minimums listed below are simply
-recommendations.
-
-
With Solaris and Mac OS X, the version listed below is the oldest release we
-can guarantee builds and works, and the specific version of the compilers used
-could be critical.
-
-
With Windows the critical aspect is the Visual Studio compiler used, which due
-to it's runtime, generally dictates what Windows systems can do the builds and
-where the resulting bits can be used.
-
-
NOTE: We expect a change here off these older Windows OS releases and to a
-'less older' one, probably Windows 2008R2 X64.
-
-
With Linux, it was just a matter of picking a stable distribution that is a
-good representative for Linux in general.
-
-
NOTE: We expect a change here from Fedora 9 to something else, but it has not
-been completely determined yet, possibly Ubuntu 12.04 X64, unbiased community
-feedback would be welcome on what a good choice would be here.
-
-
It is understood that most developers will NOT be using these specific
-versions, and in fact creating these specific versions may be difficult due to
-the age of some of this software. It is expected that developers are more often
-using the more recent releases and distributions of these operating systems.
-
-
Compilation problems with newer or different C/C++ compilers is a common
-problem. Similarly, compilation problems related to changes to the
-/usr/include or system header files is also a common problem with older,
-newer, or unreleased OS versions. Please report these types of problems as bugs
-so that they can be dealt with accordingly.
-
-
-
-
-
-
Base OS and Architecture
-
OS
-
C/C++ Compiler
-
Bootstrap JDK
-
Processors
-
RAM Minimum
-
DISK Needs
-
-
-
-
-
Linux X86 (32-bit) and X64 (64-bit)
-
Fedora 9
-
gcc 4.3
-
JDK 7u7
-
2 or more
-
1 GB
-
6 GB
-
-
-
Solaris SPARC (32-bit) and SPARCV9 (64-bit)
-
Solaris 10 Update 6
-
Studio 12 Update 1 + patches
-
JDK 7u7
-
4 or more
-
4 GB
-
8 GB
-
-
-
Solaris X86 (32-bit) and X64 (64-bit)
-
Solaris 10 Update 6
-
Studio 12 Update 1 + patches
-
JDK 7u7
-
4 or more
-
4 GB
-
8 GB
-
-
-
Windows X86 (32-bit)
-
Windows XP
-
Microsoft Visual Studio C++ 2010 Professional Edition
-
JDK 7u7
-
2 or more
-
2 GB
-
6 GB
-
-
-
Windows X64 (64-bit)
-
Windows Server 2003 - Enterprise x64 Edition
-
Microsoft Visual Studio C++ 2010 Professional Edition
-
JDK 7u7
-
2 or more
-
2 GB
-
6 GB
-
-
-
Mac OS X X64 (64-bit)
-
Mac OS X 10.7 "Lion"
-
XCode 4.5.2 or newer
-
JDK 7u7
-
2 or more
-
4 GB
-
6 GB
-
-
-
-
-
-
-
-
-
-
Specific Developer Build Environments
-
-
We won't be listing all the possible environments, but we will try to provide
-what information we have available to us.
-
-
NOTE: The community can help out by updating this part of the document.
-
-
Fedora
-
-
After installing the latest Fedora you need to
-install several build dependencies. The simplest way to do it is to execute the
-following commands as user root:
After installing CentOS 5.5 you need to make sure you
-have the following Development bundles installed:
-
-
-
Development Libraries
-
Development Tools
-
Java Development
-
X Software Development (Including XFree86-devel)
-
-
-
Plus the following packages:
-
-
-
cups devel: Cups Development Package
-
alsa devel: Alsa Development Package
-
Xi devel: libXi.so Development Package
-
-
-
The freetype 2.3 packages don't seem to be available, but the freetype 2.3
-sources can be downloaded, built, and installed easily enough from the
-freetype site. Build and install
-with something like:
-
-
bash ./configure
- make
- sudo -u root make install
-
-
-
Mercurial packages could not be found easily, but a Google search should find
-ones, and they usually include Python if it's needed.
-
-
Debian 5.0 (Lenny)
-
-
After installing Debian 5 you need to install several
-build dependencies. The simplest way to install the build dependencies is to
-execute the following commands as user root:
After installing OpenSUSE 11.1 you need to install
-several build dependencies. The simplest way to install the build dependencies
-is to execute the following commands:
Finally, you need to unset the JAVA_HOME environment variable:
-
-
export -n JAVA_HOME`
-
-
-
Mandriva Linux One 2009 Spring
-
-
After installing Mandriva Linux One 2009 Spring you need
-to install several build dependencies. The simplest way to install the build
-dependencies is to execute the following commands as user root:
-
-
urpmi java-1.7.0-openjdk-devel make gcc gcc-c++ freetype-devel zip unzip
- libcups2-devel libxrender1-devel libalsa2-devel libstc++-static-devel
- libxtst6-devel libxi-devel
-
-
-
In addition, it is necessary to set a few environment variables for the build:
After installing OpenSolaris 2009.06 you need to
-install several build dependencies. The simplest way to install the build
-dependencies is to execute the following commands:
-
-
diff --git a/common/autoconf/basics.m4 b/common/autoconf/basics.m4
index 692ef831abf..66e69ad5a8e 100644
--- a/common/autoconf/basics.m4
+++ b/common/autoconf/basics.m4
@@ -427,6 +427,7 @@ AC_DEFUN_ONCE([BASIC_SETUP_FUNDAMENTAL_TOOLS],
BASIC_PATH_PROGS(DF, df)
BASIC_PATH_PROGS(SETFILE, SetFile)
BASIC_PATH_PROGS(CPIO, [cpio bsdcpio])
+ BASIC_PATH_PROGS(PANDOC, pandoc)
])
# Setup basic configuration paths, and platform-specific stuff related to PATHs.
diff --git a/common/autoconf/generated-configure.sh b/common/autoconf/generated-configure.sh
index fa1989a2967..be1bfeca76e 100644
--- a/common/autoconf/generated-configure.sh
+++ b/common/autoconf/generated-configure.sh
@@ -949,6 +949,7 @@ build_os
build_vendor
build_cpu
build
+PANDOC
CPIO
SETFILE
DF
@@ -1017,7 +1018,6 @@ infodir
docdir
oldincludedir
includedir
-runstatedir
localstatedir
sharedstatedir
sysconfdir
@@ -1180,6 +1180,7 @@ READLINK
DF
SETFILE
CPIO
+PANDOC
UNZIP
ZIP
LDD
@@ -1263,7 +1264,6 @@ datadir='${datarootdir}'
sysconfdir='${prefix}/etc'
sharedstatedir='${prefix}/com'
localstatedir='${prefix}/var'
-runstatedir='${localstatedir}/run'
includedir='${prefix}/include'
oldincludedir='/usr/include'
docdir='${datarootdir}/doc/${PACKAGE_TARNAME}'
@@ -1516,15 +1516,6 @@ do
| -silent | --silent | --silen | --sile | --sil)
silent=yes ;;
- -runstatedir | --runstatedir | --runstatedi | --runstated \
- | --runstate | --runstat | --runsta | --runst | --runs \
- | --run | --ru | --r)
- ac_prev=runstatedir ;;
- -runstatedir=* | --runstatedir=* | --runstatedi=* | --runstated=* \
- | --runstate=* | --runstat=* | --runsta=* | --runst=* | --runs=* \
- | --run=* | --ru=* | --r=*)
- runstatedir=$ac_optarg ;;
-
-sbindir | --sbindir | --sbindi | --sbind | --sbin | --sbi | --sb)
ac_prev=sbindir ;;
-sbindir=* | --sbindir=* | --sbindi=* | --sbind=* | --sbin=* \
@@ -1662,7 +1653,7 @@ fi
for ac_var in exec_prefix prefix bindir sbindir libexecdir datarootdir \
datadir sysconfdir sharedstatedir localstatedir includedir \
oldincludedir docdir infodir htmldir dvidir pdfdir psdir \
- libdir localedir mandir runstatedir
+ libdir localedir mandir
do
eval ac_val=\$$ac_var
# Remove trailing slashes.
@@ -1815,7 +1806,6 @@ Fine tuning of the installation directories:
--sysconfdir=DIR read-only single-machine data [PREFIX/etc]
--sharedstatedir=DIR modifiable architecture-independent data [PREFIX/com]
--localstatedir=DIR modifiable single-machine data [PREFIX/var]
- --runstatedir=DIR modifiable per-process data [LOCALSTATEDIR/run]
--libdir=DIR object code libraries [EPREFIX/lib]
--includedir=DIR C header files [PREFIX/include]
--oldincludedir=DIR C header files for non-gcc [/usr/include]
@@ -2078,6 +2068,7 @@ Some influential environment variables:
DF Override default value for DF
SETFILE Override default value for SETFILE
CPIO Override default value for CPIO
+ PANDOC Override default value for PANDOC
UNZIP Override default value for UNZIP
ZIP Override default value for ZIP
LDD Override default value for LDD
@@ -4450,7 +4441,7 @@ VS_TOOLSET_SUPPORTED_2022=true
#CUSTOM_AUTOCONF_INCLUDE
# Do not change or remove the following line, it is needed for consistency checks:
-DATE_WHEN_GENERATED=1670219878
+DATE_WHEN_GENERATED=1694011184
###############################################################################
#
@@ -13510,6 +13501,192 @@ $as_echo "$tool_specified" >&6; }
+ # Publish this variable in the help.
+
+
+ if test "x$PANDOC" = x; then
+ # The variable is not set by user, try to locate tool using the code snippet
+ for ac_prog in pandoc
+do
+ # Extract the first word of "$ac_prog", so it can be a program name with args.
+set dummy $ac_prog; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_path_PANDOC+:} false; then :
+ $as_echo_n "(cached) " >&6
+else
+ case $PANDOC in
+ [\\/]* | ?:[\\/]*)
+ ac_cv_path_PANDOC="$PANDOC" # Let the user override the test with a path.
+ ;;
+ *)
+ as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+ IFS=$as_save_IFS
+ test -z "$as_dir" && as_dir=.
+ for ac_exec_ext in '' $ac_executable_extensions; do
+ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
+ ac_cv_path_PANDOC="$as_dir/$ac_word$ac_exec_ext"
+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+ break 2
+ fi
+done
+ done
+IFS=$as_save_IFS
+
+ ;;
+esac
+fi
+PANDOC=$ac_cv_path_PANDOC
+if test -n "$PANDOC"; then
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $PANDOC" >&5
+$as_echo "$PANDOC" >&6; }
+else
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+
+ test -n "$PANDOC" && break
+done
+
+ else
+ # The variable is set, but is it from the command line or the environment?
+
+ # Try to remove the string !PANDOC! from our list.
+ try_remove_var=${CONFIGURE_OVERRIDDEN_VARIABLES//!PANDOC!/}
+ if test "x$try_remove_var" = "x$CONFIGURE_OVERRIDDEN_VARIABLES"; then
+ # If it failed, the variable was not from the command line. Ignore it,
+ # but warn the user (except for BASH, which is always set by the calling BASH).
+ if test "xPANDOC" != xBASH; then
+ { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Ignoring value of PANDOC from the environment. Use command line variables instead." >&5
+$as_echo "$as_me: WARNING: Ignoring value of PANDOC from the environment. Use command line variables instead." >&2;}
+ fi
+ # Try to locate tool using the code snippet
+ for ac_prog in pandoc
+do
+ # Extract the first word of "$ac_prog", so it can be a program name with args.
+set dummy $ac_prog; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_path_PANDOC+:} false; then :
+ $as_echo_n "(cached) " >&6
+else
+ case $PANDOC in
+ [\\/]* | ?:[\\/]*)
+ ac_cv_path_PANDOC="$PANDOC" # Let the user override the test with a path.
+ ;;
+ *)
+ as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+ IFS=$as_save_IFS
+ test -z "$as_dir" && as_dir=.
+ for ac_exec_ext in '' $ac_executable_extensions; do
+ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
+ ac_cv_path_PANDOC="$as_dir/$ac_word$ac_exec_ext"
+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+ break 2
+ fi
+done
+ done
+IFS=$as_save_IFS
+
+ ;;
+esac
+fi
+PANDOC=$ac_cv_path_PANDOC
+if test -n "$PANDOC"; then
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $PANDOC" >&5
+$as_echo "$PANDOC" >&6; }
+else
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+
+ test -n "$PANDOC" && break
+done
+
+ else
+ # If it succeeded, then it was overridden by the user. We will use it
+ # for the tool.
+
+ # First remove it from the list of overridden variables, so we can test
+ # for unknown variables in the end.
+ CONFIGURE_OVERRIDDEN_VARIABLES="$try_remove_var"
+
+ # Check if the provided tool contains a complete path.
+ tool_specified="$PANDOC"
+ tool_basename="${tool_specified##*/}"
+ if test "x$tool_basename" = "x$tool_specified"; then
+ # A command without a complete path is provided, search $PATH.
+ { $as_echo "$as_me:${as_lineno-$LINENO}: Will search for user supplied tool PANDOC=$tool_basename" >&5
+$as_echo "$as_me: Will search for user supplied tool PANDOC=$tool_basename" >&6;}
+ # Extract the first word of "$tool_basename", so it can be a program name with args.
+set dummy $tool_basename; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_path_PANDOC+:} false; then :
+ $as_echo_n "(cached) " >&6
+else
+ case $PANDOC in
+ [\\/]* | ?:[\\/]*)
+ ac_cv_path_PANDOC="$PANDOC" # Let the user override the test with a path.
+ ;;
+ *)
+ as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+ IFS=$as_save_IFS
+ test -z "$as_dir" && as_dir=.
+ for ac_exec_ext in '' $ac_executable_extensions; do
+ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
+ ac_cv_path_PANDOC="$as_dir/$ac_word$ac_exec_ext"
+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+ break 2
+ fi
+done
+ done
+IFS=$as_save_IFS
+
+ ;;
+esac
+fi
+PANDOC=$ac_cv_path_PANDOC
+if test -n "$PANDOC"; then
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $PANDOC" >&5
+$as_echo "$PANDOC" >&6; }
+else
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+
+ if test "x$PANDOC" = x; then
+ as_fn_error $? "User supplied tool $tool_basename could not be found" "$LINENO" 5
+ fi
+ else
+ # Otherwise we believe it is a complete path. Use it as it is.
+ { $as_echo "$as_me:${as_lineno-$LINENO}: Will use user supplied tool PANDOC=$tool_specified" >&5
+$as_echo "$as_me: Will use user supplied tool PANDOC=$tool_specified" >&6;}
+ { $as_echo "$as_me:${as_lineno-$LINENO}: checking for PANDOC" >&5
+$as_echo_n "checking for PANDOC... " >&6; }
+ if test ! -x "$tool_specified"; then
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: not found" >&5
+$as_echo "not found" >&6; }
+ as_fn_error $? "User supplied tool PANDOC=$tool_specified does not exist or is not executable" "$LINENO" 5
+ fi
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $tool_specified" >&5
+$as_echo "$tool_specified" >&6; }
+ fi
+ fi
+ fi
+
+
+
+
# Now we can determine OpenJDK build and target platforms. This is required to
# have early on.
# Make sure we can run config.sub.
diff --git a/common/autoconf/spec.gmk.in b/common/autoconf/spec.gmk.in
index a9cea037ae5..f59a5f94d30 100644
--- a/common/autoconf/spec.gmk.in
+++ b/common/autoconf/spec.gmk.in
@@ -549,6 +549,7 @@ LN:=@LN@
MKDIR:=@MKDIR@
MV:=@MV@
NAWK:=@NAWK@
+PANDOC:=@PANDOC@
PRINTF:=@PRINTF@
PWD:=@THEPWDCMD@
RM:=@RM@
diff --git a/common/autoconf/version-numbers b/common/autoconf/version-numbers
index e53f02edab2..fe08eae1440 100644
--- a/common/autoconf/version-numbers
+++ b/common/autoconf/version-numbers
@@ -1,5 +1,5 @@
#
-# Copyright (c) 2011, 2013, Oracle and/or its affiliates. All rights reserved.
+# Copyright (c) 2011, 2023, Oracle and/or its affiliates. All rights reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
#
# This code is free software; you can redistribute it and/or modify it
@@ -26,7 +26,7 @@
JDK_MAJOR_VERSION=1
JDK_MINOR_VERSION=8
JDK_MICRO_VERSION=0
-JDK_UPDATE_VERSION=392
+JDK_UPDATE_VERSION=402
LAUNCHER_NAME=openjdk
PRODUCT_NAME=OpenJDK
PRODUCT_SUFFIX="Runtime Environment"
diff --git a/common/bin/update-build-readme.sh b/common/bin/update-build-readme.sh
deleted file mode 100644
index f16e289b32a..00000000000
--- a/common/bin/update-build-readme.sh
+++ /dev/null
@@ -1,62 +0,0 @@
-#!/bin/bash
-
-# Get an absolute path to this script, since that determines the top-level
-# directory.
-this_script_dir=`dirname $0`
-TOPDIR=`cd $this_script_dir/../.. > /dev/null && pwd`
-
-GREP=grep
-MD_FILE=$TOPDIR/README-builds.md
-HTML_FILE=$TOPDIR/README-builds.html
-
-# Locate the markdown processor tool and check that it is the correct version.
-locate_markdown_processor() {
- if [ -z "$MARKDOWN" ]; then
- MARKDOWN=`which markdown 2> /dev/null`
- if [ -z "$MARKDOWN" ]; then
- echo "Error: Cannot locate markdown processor" 1>&2
- exit 1
- fi
- fi
-
- # Test version
- MARKDOWN_VERSION=`$MARKDOWN -version | $GREP version`
- if [ "x$MARKDOWN_VERSION" != "xThis is Markdown, version 1.0.1." ]; then
- echo "Error: Expected markdown version 1.0.1." 1>&2
- echo "Actual version found: $MARKDOWN_VERSION" 1>&2
- echo "Download markdown here: https://daringfireball.net/projects/markdown/" 1>&2
- exit 1
- fi
-
-}
-
-# Verify that the source markdown file looks sound.
-verify_source_code() {
- TOO_LONG_LINES=`$GREP -E -e '^.{80}.+$' $MD_FILE`
- if [ "x$TOO_LONG_LINES" != x ]; then
- echo "Warning: The following lines are longer than 80 characters:"
- $GREP -E -e '^.{80}.+$' $MD_FILE
- fi
-}
-
-# Convert the markdown file to html format.
-process_source() {
- echo "Generating html file from markdown"
- cat > $HTML_FILE << END
-
-
- OpenJDK Build README
-
-
-END
- ${MARKDOWN} $MD_FILE >> $HTML_FILE
- cat >> $HTML_FILE <
-
-END
- echo "Done"
-}
-
-locate_markdown_processor
-verify_source_code
-process_source
diff --git a/corba/README b/corba/README
deleted file mode 100644
index 56825f5ca08..00000000000
--- a/corba/README
+++ /dev/null
@@ -1,14 +0,0 @@
-README:
- This file should be located at the top of the corba Mercurial repository.
-
- See http://openjdk.java.net/ for more information about the OpenJDK.
-
- See ../README-builds.html for complete details on build machine requirements.
-
-Simple Build Instructions:
-
- cd make && gnumake
-
- The files that will be imported into the jdk build will be in the "dist"
- directory.
-
diff --git a/doc/building.html b/doc/building.html
new file mode 100644
index 00000000000..84d2aa3ace2
--- /dev/null
+++ b/doc/building.html
@@ -0,0 +1,715 @@
+
+
+
+
+
+
+ OpenJDK Build README
+
+
+
+
+
+
OpenJDK Build README
+
+
+
+
Introduction
+
This README file contains build instructions for the OpenJDK. Building the source code for the OpenJDK requires a certain degree of technical expertise.
+
!!!!!!!!!!!!!!! THIS IS A MAJOR RE-WRITE of this document. !!!!!!!!!!!!!
+
Some Headlines:
+
+
The build is now a “configure && make” style build
+
Any GNU make 3.81 or newer should work
+
The build should scale, i.e. more processors should cause the build to be done in less wall-clock time
+
Nested or recursive make invocations have been significantly reduced, as has the total fork/exec or spawning of sub processes during the build
+
Windows MKS usage is no longer supported
+
Windows Visual Studio vsvars*.bat and vcvars*.bat files are run automatically
+
Ant is no longer used when building the OpenJDK
+
Use of ALT_* environment variables for configuring the build is no longer supported
The OpenJDK sources are maintained with the revision control system Mercurial. If you are new to Mercurial, please see the Beginner Guides or refer to the Mercurial Book. The first few chapters of the book provide an excellent overview of Mercurial, what it is and how it works.
To get the entire set of OpenJDK Mercurial repositories use the script get_source.sh located in the root repository:
+
hg clone http://hg.openjdk.java.net/jdk8/jdk8 YourOpenJDK
+ cd YourOpenJDK
+ bash ./get_source.sh
+
Once you have all the repositories, keep in mind that each repository is its own independent repository. You can also re-run ./get_source.sh anytime to pull over all the latest changesets in all the repositories. This set of nested repositories has been given the term “forest” and there are various ways to apply the same hg command to each of the repositories. For example, the script make/scripts/hgforest.sh can be used to repeat the same hg command on every repository, e.g.
+
cd YourOpenJDK
+ bash ./make/scripts/hgforest.sh status
+
Repositories
+
The set of repositories and what they contain:
+
+
. (root) contains common configure and makefile logic
+
hotspot contains source code and make files for building the OpenJDK Hotspot Virtual Machine
+
langtools contains source code for the OpenJDK javac and language tools
+
jdk contains source code and make files for building the OpenJDK runtime libraries and misc files
+
jaxp contains source code for the OpenJDK JAXP functionality
+
jaxws contains source code for the OpenJDK JAX-WS functionality
+
corba contains source code for the OpenJDK Corba functionality
+
nashorn contains source code for the OpenJDK JavaScript implementation
+
+
Repository Source Guidelines
+
There are some very basic guidelines:
+
+
Use of whitespace in source files (.java, .c, .h, .cpp, and .hpp files) is restricted. No TABs, no trailing whitespace on lines, and files should not terminate in more than one blank line.
+
Files with execute permissions should not be added to the source repositories.
+
All generated files need to be kept isolated from the files maintained or managed by the source control system. The standard area for generated files is the top level build/ directory.
+
The default build process should be to build the product and nothing else, in one form, e.g. a product (optimized), debug (non-optimized, -g plus assert logic), or fastdebug (optimized, -g plus assert logic).
+
The .hgignore file in each repository must exist and should include ^build/, ^dist/ and optionally any nbproject/private directories. It should NEVER include anything in the src/ or test/ or any managed directory area of a repository.
+
Directory names and file names should never contain blanks or non-printing characters.
+
Generated source or binary files should NEVER be added to the repository (that includes javah output). There are some exceptions to this rule, in particular with some of the generated configure scripts.
+
Files not needed for typical building or testing of the repository should not be added to the repository.
+
+
+
Building
+
The very first step in building the OpenJDK is making sure the system itself has everything it needs to do OpenJDK builds. Once a system is setup, it generally doesn’t need to be done again.
+
Building the OpenJDK is now done with running a configure script which will try and find and verify you have everything you need, followed by running make, e.g.
+
+
bash ./configure
+make all
+
+
Where possible the configure script will attempt to locate the various components in the default locations or via component specific variable settings. When the normal defaults fail or components cannot be found, additional configure options may be necessary to help configure find the necessary tools for the build, or you may need to re-visit the setup of your system due to missing software packages.
+
NOTE: The configure script file does not have execute permissions and will need to be explicitly run with bash, see the source guidelines.
+
+
System Setup
+
Before even attempting to use a system to build the OpenJDK there are some very basic system setups needed. For all systems:
+
+
Be sure the GNU make utility is version 3.81 or newer, e.g. run “make -version”
+
+
Install a Bootstrap JDK. All OpenJDK builds require access to a previously released JDK called the bootstrap JDK or boot JDK. The general rule is that the bootstrap JDK must be an instance of the previous major release of the JDK. In addition, there may be a requirement to use a release at or beyond a particular update level.
+
Building JDK 8 requires use of a version of JDK 7 that is at Update 7 or newer. JDK 8 developers should not use JDK 8 as the boot JDK, to ensure that JDK 8 dependencies are not introduced into the parts of the system that are built with JDK 7.
+
The JDK 7 binaries can be downloaded from Oracle’s JDK 7 download site. For build performance reasons it is very important that this bootstrap JDK be made available on the local disk of the machine doing the build. You should add its bin directory to the PATH environment variable. If configure has any issues finding this JDK, you may need to use the configure option --with-boot-jdk.
+
Ensure that GNU make, the Bootstrap JDK, and the compilers are all in your PATH environment variable.
Install XCode 4.5.2 and also install the “Command line tools” found under the preferences pane “Downloads”
+
+
Linux
+
With Linux, try and favor the system packages over building your own or getting packages from other areas. Most Linux builds should be possible with the system’s available packages.
+
Note that some Linux systems have a habit of pre-populating your environment variables for you, for example JAVA_HOME might get pre-defined for you to refer to the JDK installed on your Linux system. You will need to unset JAVA_HOME. It’s a good idea to run env and verify the environment variables you are getting from the default system settings make sense for building the OpenJDK.
+
Solaris
+
Studio Compilers
+
At a minimum, the Studio 12 Update 1 Compilers (containing version 5.10 of the C and C++ compilers) is required, including specific patches.
+
The Solaris SPARC patch list is:
+
+
118683-05: SunOS 5.10: Patch for profiling libraries and assembler
+
119963-21: SunOS 5.10: Shared library patch for C++
141858-06: Sun Studio 12 Update 1_x86: Sun Compiler Common patch for x86 backend
+
128229-09: Sun Studio 12 Update 1_x86: Patch for C++ Compiler
+
142363-05: Sun Studio 12 Update 1_x86: Patch for C Compiler
+
142368-01: Sun Studio 12.1_x86: Patch for Performance Analyzer Tools
+
+
Place the bin directory in PATH.
+
The Oracle Solaris Studio Express compilers at: Oracle Solaris Studio Express Download site are also an option, although these compilers have not been extensively used yet.
+
Windows
+
Windows Unix Toolkit
+
Building on Windows requires a Unix-like environment, notably a Unix-like shell. There are several such environments available of which Cygwin and MinGW/MSYS are currently supported for the OpenJDK build. One of the differences of these systems from standard Windows tools is the way they handle Windows path names, particularly path names which contain spaces, backslashes as path separators and possibly drive letters. Depending on the use case and the specifics of each environment these path problems can be solved by a combination of quoting whole paths, translating backslashes to forward slashes, escaping backslashes with additional backslashes and translating the path names to their “8.3” version.
+
CYGWIN
+
CYGWIN is an open source, Linux-like environment which tries to emulate a complete POSIX layer on Windows. It tries to be smart about path names and can usually handle all kinds of paths if they are correctly quoted or escaped although internally it maps drive letters <drive>: to a virtual directory /cygdrive/<drive>.
+
You can always use the cygpath utility to map pathnames with spaces or the backslash character into the C:/ style of pathname (called ‘mixed’), e.g. cygpath -s -m "<path>".
+
Note that the use of CYGWIN creates a unique problem with regards to setting PATH. Normally on Windows the PATH variable contains directories separated with the “;” character (Solaris and Linux use “:”). With CYGWIN, it uses “:”, but that means that paths like “C:/path” cannot be placed in the CYGWIN version of PATH and instead CYGWIN uses something like /cygdrive/c/path which CYGWIN understands, but only CYGWIN understands.
+
The OpenJDK build requires CYGWIN version 1.7.16 or newer. Information about CYGWIN can be obtained from the CYGWIN website at www.cygwin.com.
+
By default CYGWIN doesn’t install all the tools required for building the OpenJDK. Along with the default installation, you need to install the following tools.
+
+
+
+
Binary Name
+
Category
+
Package
+
Description
+
+
+
+
+
ar.exe
+
Devel
+
binutils
+
The GNU assembler, linker and binary utilities
+
+
+
make.exe
+
Devel
+
make
+
The GNU version of the ‘make’ utility built for CYGWIN
+
+
+
m4.exe
+
Interpreters
+
m4
+
GNU implementation of the traditional Unix macro processor
+
+
+
cpio.exe
+
Utils
+
cpio
+
A program to manage archives of files
+
+
+
gawk.exe
+
Utils
+
awk
+
Pattern-directed scanning and processing language
+
+
+
file.exe
+
Utils
+
file
+
Determines file type using ‘magic’ numbers
+
+
+
zip.exe
+
Archive
+
zip
+
Package and compress (archive) files
+
+
+
unzip.exe
+
Archive
+
unzip
+
Extract compressed files in a ZIP archive
+
+
+
free.exe
+
System
+
procps
+
Display amount of free and used memory in the system
+
+
+
+
Note that the CYGWIN software can conflict with other non-CYGWIN software on your Windows system. CYGWIN provides a FAQ for known issues and problems, of particular interest is the section on BLODA (applications that interfere with CYGWIN).
+
MinGW/MSYS
+
MinGW (“Minimalist GNU for Windows”) is a collection of free Windows specific header files and import libraries combined with GNU toolsets that allow one to produce native Windows programs that do not rely on any 3rd-party C runtime DLLs. MSYS is a supplement to MinGW which allows building applications and programs which rely on traditional UNIX tools to be present. Among others this includes tools like bash and make. See MinGW/MSYS for more information.
+
Like Cygwin, MinGW/MSYS can handle different types of path formats. They are internally converted to paths with forward slashes and drive letters <drive>: replaced by a virtual directory /<drive>. Additionally, MSYS automatically detects binaries compiled for the MSYS environment and feeds them with the internal, Unix-style path names. If native Windows applications are called from within MSYS programs their path arguments are automatically converted back to Windows style path names with drive letters and backslashes as path separators. This may cause problems for Windows applications which use forward slashes as parameter separator (e.g. cl /nologo /I) because MSYS may wrongly replace such parameters by drive letters.
+
In addition to the tools which will be installed by default, you have to manually install the msys-zip and msys-unzip packages. This can be easily done with the MinGW command line installer:
The 32-bit and 64-bit OpenJDK Windows build requires Microsoft Visual Studio C++ 2010 (VS2010) Professional Edition or Express compiler. The compiler and other tools are expected to reside in the location defined by the variable VS100COMNTOOLS which is set by the Microsoft Visual Studio installer.
+
Only the C++ part of VS2010 is needed. Try to let the installation go to the default install directory. Always reboot your system after installing VS2010. The system environment variable VS100COMNTOOLS should be set in your environment.
+
Make sure that TMP and TEMP are also set in the environment and refer to Windows paths that exist, like C:\temp, not /tmp, not /cygdrive/c/temp, and not C:/temp. C:\temp is just an example, it is assumed that this area is private to the user, so by default after installs you should see a unique user path in these variables.
+
Mac OS X
+
Make sure you get the right XCode version.
+
+
Configure
+
The basic invocation of the configure script looks like:
+
+
bash ./configure [options]
+
+
This will create an output directory containing the “configuration” and setup an area for the build result. This directory typically looks like:
+
+
build/linux-x64-normal-server-release
+
+
configure will try to figure out what system you are running on and where all necessary build components are. If you have all prerequisites for building installed, it should find everything. If it fails to detect any component automatically, it will exit and inform you about the problem. When this happens, read more below in the configure options.
+
Some examples:
+
+
Windows 32bit build with freetype specified:
+bash ./configure --with-freetype=/cygdrive/c/freetype-i586 --with-target-bits=32
Complete details on all the OpenJDK configure options can be seen with:
+
+
bash ./configure --help=short
+
+
Use --help to see all the configure options available. You can generate any number of different configurations, e.g. debug, release, 32, 64, etc.
+
Some of the more commonly used configure options are:
+
+
--enable-debug
+set the debug level to fastdebug (this is a shorthand for --with-debug-level=fastdebug)
+
+
+
+
--with-alsa=_path_
+select the location of the Advanced Linux Sound Architecture (ALSA)
+
+
+
Version 0.9.1 or newer of the ALSA files are required for building the OpenJDK on Linux. These Linux files are usually available from an “alsa” or “libasound” development package, and it’s highly recommended that you try and use the package provided by the particular version of Linux that you are using.
--with-boot-jdk-jvmargs=“args”
+provide the JVM options to be used to run the Bootstrap JDK
+
+
+
--with-cacerts=_path_
+select the path to the cacerts file.
+
+
+
See Certificate Authority on Wikipedia for a better understanding of the Certificate Authority (CA). A certificates file named “cacerts” represents a system-wide keystore with CA certificates. In JDK and JRE binary bundles, the “cacerts” file contains root CA certificates from several public CAs (e.g., VeriSign, Thawte, and Baltimore). The source contain a cacerts file without CA root certificates. Formal JDK builders will need to secure permission from each public CA and include the certificates into their own custom cacerts file. Failure to provide a populated cacerts file will result in verification errors of a certificate chain during runtime. By default an empty cacerts file is provided and that should be fine for most JDK developers.
+
+
+
+
--with-cups=_path_
+select the CUPS install location
+
+
+
The Common UNIX Printing System (CUPS) Headers are required for building the OpenJDK on Solaris and Linux. The Solaris header files can be obtained by installing the package SFWcups from the Solaris Software Companion CD/DVD, these often will be installed into the directory /opt/sfw/cups.
+
+
+
The CUPS header files can always be downloaded from www.cups.org.
+
+
+
--with-cups-include=_path_
+select the CUPS include directory location
+
+
+
--with-debug-level=_level_
+select the debug information level of release, fastdebug, or slowdebug
+
+
+
--with-dev-kit=_path_
+select location of the compiler install or developer install location
+
+
+
+
--with-freetype=_path_
+select the freetype files to use.
+
+
+
Expecting the freetype libraries under lib/ and the headers under include/.
+
+
+
Version 2.3 or newer of FreeType is required. On Unix systems required files can be available as part of your distribution (while you still may need to upgrade them). Note that you need development version of package that includes both the FreeType library and header files.
Note that by default FreeType is built with byte code hinting support disabled due to licensing restrictions. In this case, text appearance and metrics are expected to differ from Sun’s official JDK build. See the SourceForge FreeType2 Home Page for more information.
+
+
+
--with-import-hotspot=_path_
+select the location to find hotspot binaries from a previous build to avoid building hotspot
+
+
+
--with-target-bits=_arg_
+select 32 or 64 bit build
+
+
+
--with-jvm-variants=_variants_
+select the JVM variants to build from, comma separated list that can include: server, client, kernel, zero and zeroshark
+
+
+
--with-memory-size=_size_
+select the RAM size that GNU make will think this system has
+
+
+
--with-msvcr-dll=_path_
+select the msvcr100.dll file to include in the Windows builds (C/C++ runtime library for Visual Studio).
+
+
+
This is usually picked up automatically from the redist directories of Visual Studio 2010.
+
+
+
--with-num-cores=_cores_
+select the number of cores to use (processor count or CPU count)
+
+
+
+
--with-x=_path_
+select the location of the X11 and xrender files.
+
+
+
The XRender Extension Headers are required for building the OpenJDK on Solaris and Linux. The Linux header files are usually available from a “Xrender” development package, it’s recommended that you try and use the package provided by the particular distribution of Linux that you are using. The Solaris XRender header files are included with the other X11 header files in the package SFWxwinc on new enough versions of Solaris and will be installed in /usr/X11/include/X11/extensions/Xrender.h or /usr/openwin/share/include/X11/extensions/Xrender.h
+
+
+
Make
+
The basic invocation of the make utility looks like:
+
+
make all
+
+
This will start the build to the output directory containing the “configuration” that was created by the configure script. Run make help for more information on the available targets.
+
There are some of the make targets that are of general interest:
+
+
empty
+build everything but no images
+
+
+
all
+build everything including images
+
+
+
all-conf
+build all configurations
+
+
+
images
+create complete j2sdk and j2re images
+
+
+
install
+install the generated images locally, typically in /usr/local
+
+
+
clean
+remove all files generated by make, but not those generated by configure
+
+
+
dist-clean
+remove all files generated by both make and configure (basically killing the configuration)
+
+
+
help
+give some help on using make, including some interesting make targets
+
+
+
Testing
+
When the build is completed, you should see the generated binaries and associated files in the j2sdk-image directory in the output directory. In particular, the build/*/images/j2sdk-image/bin directory should contain executables for the OpenJDK tools and utilities for that configuration. The testing tool jtreg will be needed and can be found at: the jtreg site. The provided regression tests in the repositories can be run with the command:
+
+
cd test && make PRODUCT_HOME=`pwd`/../build/*/images/j2sdk-image all
+
+
+
Appendix A: Hints and Tips
+
FAQ
+
Q: The generated-configure.sh file looks horrible! How are you going to edit it?
+A: The generated-configure.sh file is generated (think “compiled”) by the autoconf tools. The source code is in configure.ac and various .m4 files in common/autoconf, which are much more readable.
+
Q: Why is the generated-configure.sh file checked in, if it is generated?
+A: If it was not generated, every user would need to have the autoconf tools installed, and re-generate the configure file as the first step. Our goal is to minimize the work needed to be done by the user to start building OpenJDK, and to minimize the number of external dependencies required.
+
Q: Do you require a specific version of autoconf for regenerating generated-configure.sh?
+A: Yes, version 2.69 is required and should be easy enough to acquire on all supported operating systems. The reason for this is to avoid large spurious changes in generated-configure.sh.
+
Q: How do you regenerate generated-configure.sh after making changes to the input files?
+A: Regenerating generated-configure.sh should always be done using the script common/autoconf/autogen.sh to ensure that the correct files get updated. This script should also be run after mercurial tries to merge generated-configure.sh as a merge of the generated file is not guaranteed to be correct.
+
Q: What are the files in common/makefiles/support/* for? They look like gibberish.
+A: They are a somewhat ugly hack to compensate for command line length limitations on certain platforms (Windows, Solaris). Due to a combination of limitations in make and the shell, command lines containing too many files will not work properly. These helper files are part of an elaborate hack that will compress the command line in the makefile and then uncompress it safely. We’re not proud of it, but it does fix the problem. If you have any better suggestions, we’re all ears! :-)
+
Q: I want to see the output of the commands that make runs, like in the old build. How do I do that?
+A: You specify the LOG variable to make. There are several log levels:
+
+
warn – Default and very quiet.
+
info – Shows more progress information than warn.
+
debug – Echos all command lines and prints all macro calls for compilation definitions.
+
trace – Echos all $(shell) command lines as well.
+
+
Q: When do I have to re-run configure?
+A: Normally you will run configure only once for creating a configuration. You need to re-run configuration only if you want to change any configuration options, or if you pull down changes to the configure script.
+
Q: I have added a new source file. Do I need to modify the makefiles?
+A: Normally, no. If you want to create e.g. a new native library, you will need to modify the makefiles. But for normal file additions or removals, no changes are needed. There are certain exceptions for some native libraries where the source files are spread over many directories which also contain sources for other libraries. In these cases it was simply easier to create include lists rather than excludes.
+
Q: When I run configure --help, I see many strange options, like --dvidir. What is this?
+A: Configure provides a slew of options by default, to all projects that use autoconf. Most of them are not used in OpenJDK, so you can safely ignore them. To list only OpenJDK specific features, use configure --help=short instead.
+
Q:configure provides OpenJDK-specific features such as --with-builddeps-server that are not described in this document. What about those?
+A: Try them out if you like! But be aware that most of these are experimental features. Many of them don’t do anything at all at the moment; the option is just a placeholder. Others depend on pieces of code or infrastructure that is currently not ready for prime time.
+
Q: How will you make sure you don’t break anything?
+A: We have a script that compares the result of the new build system with the result of the old. For most part, we aim for (and achieve) byte-by-byte identical output. There are however technical issues with e.g. native binaries, which might differ in a byte-by-byte comparison, even when building twice with the old build system. For these, we compare relevant aspects (e.g. the symbol table and file size). Note that we still don’t have 100% equivalence, but we’re close.
+
Q: I noticed this thing X in the build that looks very broken by design. Why don’t you fix it?
+A: Our goal is to produce a build output that is as close as technically possible to the old build output. If things were weird in the old build, they will be weird in the new build. Often, things were weird before due to obscurity, but in the new build system the weird stuff comes up to the surface. The plan is to attack these things at a later stage, after the new build system is established.
+
Q: The code in the new build system is not that well-structured. Will you fix this?
+A: Yes! The new build system has grown bit by bit as we converted the old system. When all of the old build system is converted, we can take a step back and clean up the structure of the new build system. Some of this we plan to do before replacing the old build system and some will need to wait until after.
+
Q: Is anything able to use the results of the new build’s default make target?
+A: Yes, this is the minimal (or roughly minimal) set of compiled output needed for a developer to actually execute the newly built JDK. The idea is that in an incremental development fashion, when doing a normal make, you should only spend time recompiling what’s changed (making it purely incremental) and only do the work that’s needed to actually run and test your code. The packaging stuff that is part of the images target is not needed for a normal developer who wants to test his new code. Even if it’s quite fast, it’s still unnecessary. We’re targeting sub-second incremental rebuilds! ;-) (Or, well, at least single-digit seconds…)
+
Q: I usually set a specific environment variable when building, but I can’t find the equivalent in the new build. What should I do?
+A: It might very well be that we have neglected to add support for an option that was actually used from outside the build system. Email us and we will add support for it!
+
Build Performance Tips
+
Building OpenJDK requires a lot of horsepower. Some of the build tools can be adjusted to utilize more or less of resources such as parallel threads and memory. The configure script analyzes your system and selects reasonable values for such options based on your hardware. If you encounter resource problems, such as out of memory conditions, you can modify the detected values with:
+
+
--with-num-cores – number of cores in the build system, e.g. --with-num-cores=8
+
--with-memory-size – memory (in MB) available in the build system, e.g. --with-memory-size=1024
+
+
It might also be necessary to specify the JVM arguments passed to the Bootstrap JDK, using e.g. --with-boot-jdk-jvmargs="-Xmx8G -enableassertions". Doing this will override the default JVM arguments passed to the Bootstrap JDK.
+
One of the top goals of the new build system is to improve the build performance and decrease the time needed to build. This will soon also apply to the java compilation when the Smart Javac wrapper is making its way into jdk8. It can be tried in the build-infra repository already. You are likely to find that the new build system is faster than the old one even without this feature.
+
At the end of a successful execution of configure, you will get a performance summary, indicating how well the build will perform. Here you will also get performance hints. If you want to build fast, pay attention to those!
+
Building with ccache
+
A simple way to radically speed up compilation of native code (typically hotspot and native libraries in JDK) is to install ccache. This will cache and reuse prior compilation results, if the source code is unchanged. However, ccache versions prior to 3.1.4 does not work correctly with the precompiled headers used in OpenJDK. So if your platform supports ccache at 3.1.4 or later, we highly recommend installing it. This is currently only supported on linux.
+
Building on local disk
+
If you are using network shares, e.g. via NFS, for your source code, make sure the build directory is situated on local disk. The performance penalty is extremely high for building on a network share, close to unusable.
+
Building only one JVM
+
The old build builds multiple JVMs on 32-bit systems (client and server; and on Windows kernel as well). In the new build we have changed this default to only build server when it’s available. This improves build times for those not interested in multiple JVMs. To mimic the old behavior on platforms that support it, use --with-jvm-variants=client,server.
+
Selecting the number of cores to build on
+
By default, configure will analyze your machine and run the make process in parallel with as many threads as you have cores. This behavior can be overridden, either “permanently” (on a configure basis) using --with-num-cores=N or for a single build only (on a make basis), using make JOBS=N.
+
If you want to make a slower build just this time, to save some CPU power for other processes, you can run e.g. make JOBS=2. This will force the makefiles to only run 2 parallel processes, or even make JOBS=1 which will disable parallelism.
+
If you want to have it the other way round, namely having slow builds default and override with fast if you’re impatient, you should call configure with --with-num-cores=2, making 2 the default. If you want to run with more cores, run make JOBS=8
+
Troubleshooting
+
Solving build problems
+
If the build fails (and it’s not due to a compilation error in a source file you’ve changed), the first thing you should do is to re-run the build with more verbosity. Do this by adding LOG=debug to your make command line.
+
The build log (with both stdout and stderr intermingled, basically the same as you see on your console) can be found as build.log in your build directory.
+
You can ask for help on build problems with the new build system on either the build-dev or the build-infra-dev mailing lists. Please include the relevant parts of the build log.
+
A build can fail for any number of reasons. Most failures are a result of trying to build in an environment in which all the pre-build requirements have not been met. The first step in troubleshooting a build failure is to recheck that you have satisfied all the pre-build requirements for your platform. Scanning the configure log is a good first step, making sure that what it found makes sense for your system. Look for strange error messages or any difficulties that configure had in finding things.
+
Some of the more common problems with builds are briefly described below, with suggestions for remedies.
+
+
Corrupted Bundles on Windows:
+Some virus scanning software has been known to corrupt the downloading of zip bundles. It may be necessary to disable the ‘on access’ or ‘real time’ virus scanning features to prevent this corruption. This type of ‘real time’ virus scanning can also slow down the build process significantly. Temporarily disabling the feature, or excluding the build output directory may be necessary to get correct and faster builds.
+
Slow Builds:
+If your build machine seems to be overloaded from too many simultaneous C++ compiles, try setting the JOBS=1 on the make command line. Then try increasing the count slowly to an acceptable level for your system. Also:
+
Creating the javadocs can be very slow, if you are running javadoc, consider skipping that step.
+
Faster CPUs, more RAM, and a faster DISK usually helps. The VM build tends to be CPU intensive (many C++ compiles), and the rest of the JDK will often be disk intensive.
+
Faster compiles are possible using a tool called ccache.
+
File time issues:
+If you see warnings that refer to file time stamps, e.g.
+
+
Warning message:File 'xxx' has modification time in the future.
+Warning message:Clock skew detected. Your build may be incomplete.
+
+
These warnings can occur when the clock on the build machine is out of sync with the timestamps on the source files. Other errors, apparently unrelated but in fact caused by the clock skew, can occur along with the clock skew warnings. These secondary errors may tend to obscure the fact that the true root cause of the problem is an out-of-sync clock.
+
If you see these warnings, reset the clock on the build machine, run “gmake clobber” or delete the directory containing the build output, and restart the build from the beginning.
+
Error message: Trouble writing out table to disk
+Increase the amount of swap space on your build machine. This could be caused by overloading the system and it may be necessary to use:
+
+
make JOBS=1
+
+
to reduce the load on the system.
+
Error Message: libstdc++ not found:
+This is caused by a missing libstdc++.a library. This is installed as part of a specific package (e.g. libstdc++.so.devel.386). By default some 64-bit Linux versions (e.g. Fedora) only install the 64-bit version of the libstdc++ package. Various parts of the JDK build require a static link of the C++ runtime libraries to allow for maximum portability of the built images.
+
Linux Error Message: cannot restore segment prot after reloc
+This is probably an issue with SELinux (See SELinux on Wikipedia). Parts of the VM are built without -fPIC for performance reasons.
+
To completely disable SELinux:
+
+
$ su root
+
# system-config-securitylevel
+
In the window that appears, select the SELinux tab
+
Disable SELinux
+
+
Alternatively, instead of completely disabling it you could disable just this one check.
+
+
Select System->Administration->SELinux Management
+
In the SELinux Management Tool which appears, select “Boolean” from the menu on the left
+
Expand the “Memory Protection” group
+
Check the first item, labeled “Allow all unconfined executables to use libraries requiring text relocation …”
+
+
Windows Error Messages:
+*** fatal error - couldn't allocate heap, ...
+rm fails with "Directory not empty"
+unzip fails with "cannot create ... Permission denied"
+unzip fails with "cannot create ... Error 50"
Windows Error Message: spawn failed
+Try rebooting the system, or there could be some kind of issue with the disk or disk partition being used. Sometimes it comes with a “Permission Denied” message.
+
+
+
Appendix B: GNU make
+
The Makefiles in the OpenJDK are only valid when used with the GNU version of the utility command make (usually called gmake on Solaris). A few notes about using GNU make:
+
+
You need GNU make version 3.81 or newer. If the GNU make utility on your systems is not 3.81 or newer, see “Building GNU make”.
+
Place the location of the GNU make binary in the PATH.
+
Solaris: Do NOT use /usr/bin/make on Solaris. If your Solaris system has the software from the Solaris Developer Companion CD installed, you should try and use gmake which will be located in either the /usr/bin, /opt/sfw/bin or /usr/sfw/bin directory.
+
Windows: Make sure you start your build inside a bash shell.
+
Mac OS X: The XCode “command line tools” must be installed on your Mac.
+
+
Information on GNU make, and access to ftp download sites, are available on the GNU make web site. The latest source to GNU make is available at ftp.gnu.org/pub/gnu/make/.
+
Building GNU make
+
First step is to get the GNU make 3.81 or newer source from ftp.gnu.org/pub/gnu/make/. Building is a little different depending on the OS but is basically done with:
+
bash ./configure
+ make
+
+
Appendix C: Build Environments
+
Minimum Build Environments
+
This file often describes specific requirements for what we call the “minimum build environments” (MBE) for this specific release of the JDK. What is listed below is what the Oracle Release Engineering Team will use to build the Oracle JDK product. Building with the MBE will hopefully generate the most compatible bits that install on, and run correctly on, the most variations of the same base OS and hardware architecture. In some cases, these represent what is often called the least common denominator, but each Operating System has different aspects to it.
+
In all cases, the Bootstrap JDK version minimum is critical, we cannot guarantee builds will work with older Bootstrap JDK’s. Also in all cases, more RAM and more processors is better, the minimums listed below are simply recommendations.
+
With Solaris and Mac OS X, the version listed below is the oldest release we can guarantee builds and works, and the specific version of the compilers used could be critical.
+
+With Windows the critical aspect is the Visual Studio compiler used, which due to its runtime, generally dictates what Windows systems can do the builds and where the resulting bits can be used.
+
NOTE: We expect a change here off these older Windows OS releases and to a ‘less older’ one, probably Windows 2008R2 X64.
+
With Linux, it was just a matter of picking a stable distribution that is a good representative for Linux in general.
+
NOTE: We expect a change here from Fedora 9 to something else, but it has not been completely determined yet, possibly Ubuntu 12.04 X64, unbiased community feedback would be welcome on what a good choice would be here.
+
It is understood that most developers will NOT be using these specific versions, and in fact creating these specific versions may be difficult due to the age of some of this software. It is expected that developers are more often using the more recent releases and distributions of these operating systems.
+
+Compilation problems with newer or different C/C++ compilers are a common problem. Similarly, compilation problems related to changes to the /usr/include or system header files are also a common problem with older, newer, or unreleased OS versions. Please report these types of problems as bugs so that they can be dealt with accordingly.
+
Bootstrap JDK: JDK 7u7
+
+
+
+
Base OS and Architecture
+
OS
+
C/C++ Compiler
+
Processors
+
RAM Minimum
+
DISK Needs
+
+
+
+
+
Linux X86 (32-bit) and X64 (64-bit)
+
Fedora 9
+
gcc 4.3
+
2 or more
+
1 GB
+
6 GB
+
+
+
Solaris SPARC (32-bit) and SPARCV9 (64-bit)
+
Solaris 10 Update 6
+
Studio 12 Update 1 + patches
+
4 or more
+
4 GB
+
8 GB
+
+
+
Solaris X86 (32-bit) and X64 (64-bit)
+
Solaris 10 Update 6
+
Studio 12 Update 1 + patches
+
4 or more
+
4 GB
+
8 GB
+
+
+
Windows X86 (32-bit)
+
Windows XP
+
Microsoft Visual Studio C++ 2010 Professional Edition
+
2 or more
+
2 GB
+
6 GB
+
+
+
Windows X64 (64-bit)
+
Windows Server 2003 - Enterprise x64 Edition
+
Microsoft Visual Studio C++ 2010 Professional Edition
+
2 or more
+
2 GB
+
6 GB
+
+
+
Mac OS X X64 (64-bit)
+
Mac OS X 10.7 “Lion”
+
XCode 4.5.2 or newer
+
2 or more
+
4 GB
+
6 GB
+
+
+
+
+
Specific Developer Build Environments
+
We won’t be listing all the possible environments, but we will try to provide what information we have available to us.
+
NOTE: The community can help out by updating this part of the document.
+
Fedora
+
After installing the latest Fedora you need to install several build dependencies. The simplest way to do it is to execute the following commands as user root:
After installing CentOS 5.5 you need to make sure you have the following Development bundles installed:
+
+
Development Libraries
+
Development Tools
+
Java Development
+
X Software Development (Including XFree86-devel)
+
+
Plus the following packages:
+
+
cups devel: Cups Development Package
+
alsa devel: Alsa Development Package
+
Xi devel: libXi.so Development Package
+
+
The freetype 2.3 packages don’t seem to be available, but the freetype 2.3 sources can be downloaded, built, and installed easily enough from the freetype site. Build and install with something like:
+
bash ./configure
+ make
+ sudo -u root make install
+
+Mercurial packages could not be found easily, but a Google search should find some, and they usually include Python if it’s needed.
+
Debian 5.0 (Lenny)
+
After installing Debian 5 you need to install several build dependencies. The simplest way to install the build dependencies is to execute the following commands as user root:
After installing OpenSUSE 11.1 you need to install several build dependencies. The simplest way to install the build dependencies is to execute the following commands:
+
sudo zypper source-install -d java-1_7_0-openjdk
+ sudo zypper install make
+
In addition, it is necessary to set a few environment variables for the build:
Finally, you need to unset the JAVA_HOME environment variable:
+
export -n JAVA_HOME`
+
Mandriva Linux One 2009 Spring
+
After installing Mandriva Linux One 2009 Spring you need to install several build dependencies. The simplest way to install the build dependencies is to execute the following commands as user root:
+
urpmi java-1.7.0-openjdk-devel make gcc gcc-c++ freetype-devel zip unzip
+ libcups2-devel libxrender1-devel libalsa2-devel libstc++-static-devel
+ libxtst6-devel libxi-devel
+
In addition, it is necessary to set a few environment variables for the build:
After installing OpenSolaris 2009.06 you need to install several build dependencies. The simplest way to install the build dependencies is to execute the following commands:
+
+
diff --git a/README-builds.md b/doc/building.md
similarity index 80%
rename from README-builds.md
rename to doc/building.md
index 364fd661e3c..460e4d12668 100644
--- a/README-builds.md
+++ b/doc/building.md
@@ -1,9 +1,9 @@
+% OpenJDK Build README
+
![OpenJDK](http://openjdk.java.net/images/openjdk.png)
-# OpenJDK Build README
-*****
+--------------------------------------------------------------------------------
-
## Introduction
This README file contains build instructions for the
@@ -18,34 +18,34 @@ Some Headlines:
* Any GNU make 3.81 or newer should work
* The build should scale, i.e. more processors should cause the build to be
done in less wall-clock time
- * Nested or recursive make invocations have been significantly reduced,
- as has the total fork/exec or spawning of sub processes during the build
+ * Nested or recursive make invocations have been significantly reduced, as
+ has the total fork/exec or spawning of sub processes during the build
* Windows MKS usage is no longer supported
* Windows Visual Studio `vsvars*.bat` and `vcvars*.bat` files are run
automatically
* Ant is no longer used when building the OpenJDK
- * Use of ALT_* environment variables for configuring the build is no longer
+ * Use of ALT\_\* environment variables for configuring the build is no longer
supported
-*****
+-------------------------------------------------------------------------------
## Contents
* [Introduction](#introduction)
* [Use of Mercurial](#hg)
- * [Getting the Source](#get_source)
- * [Repositories](#repositories)
+ * [Getting the Source](#get_source)
+ * [Repositories](#repositories)
* [Building](#building)
- * [System Setup](#setup)
- * [Linux](#linux)
- * [Solaris](#solaris)
- * [Mac OS X](#macosx)
- * [Windows](#windows)
- * [Configure](#configure)
- * [Make](#make)
+ * [System Setup](#setup)
+ * [Linux](#linux)
+ * [Solaris](#solaris)
+ * [Mac OS X](#macosx)
+ * [Windows](#windows)
+ * [Configure](#configure)
+ * [Make](#make)
* [Testing](#testing)
-*****
+-------------------------------------------------------------------------------
* [Appendix A: Hints and Tips](#hints)
* [FAQ](#faq)
@@ -54,23 +54,22 @@ Some Headlines:
* [Appendix B: GNU Make Information](#gmake)
* [Appendix C: Build Environments](#buildenvironments)
-*****
+-------------------------------------------------------------------------------
-
## Use of Mercurial
The OpenJDK sources are maintained with the revision control system
[Mercurial](http://mercurial.selenic.com/wiki/Mercurial). If you are new to
-Mercurial, please see the [Beginner Guides](http://mercurial.selenic.com/wiki/
-BeginnersGuides) or refer to the [Mercurial Book](http://hgbook.red-bean.com/).
-The first few chapters of the book provide an excellent overview of Mercurial,
-what it is and how it works.
+Mercurial, please see the [Beginner
+Guides](http://mercurial.selenic.com/wiki/BeginnersGuides) or refer to the
+[Mercurial Book](http://hgbook.red-bean.com/). The first few chapters of the
+book provide an excellent overview of Mercurial, what it is and how it works.
For using Mercurial with the OpenJDK refer to the [Developer Guide: Installing
-and Configuring Mercurial](http://openjdk.java.net/guide/
-repositories.html#installConfig) section for more information.
+and Configuring
+Mercurial](http://openjdk.java.net/guide/repositories.html#installConfig)
+section for more information.
-
### Getting the Source
To get the entire set of OpenJDK Mercurial repositories use the script
@@ -82,16 +81,15 @@ To get the entire set of OpenJDK Mercurial repositories use the script
Once you have all the repositories, keep in mind that each repository is its
own independent repository. You can also re-run `./get_source.sh` anytime to
-pull over all the latest changesets in all the repositories. This set of
-nested repositories has been given the term "forest" and there are various
-ways to apply the same `hg` command to each of the repositories. For
-example, the script `make/scripts/hgforest.sh` can be used to repeat the
-same `hg` command on every repository, e.g.
+pull over all the latest changesets in all the repositories. This set of nested
+repositories has been given the term "forest" and there are various ways to
+apply the same `hg` command to each of the repositories. For example, the
+script `make/scripts/hgforest.sh` can be used to repeat the same `hg` command
+on every repository, e.g.
cd YourOpenJDK
bash ./make/scripts/hgforest.sh status
-
### Repositories
The set of repositories and what they contain:
@@ -134,9 +132,8 @@ There are some very basic guidelines:
* Files not needed for typical building or testing of the repository should
not be added to the repository.
-*****
+-------------------------------------------------------------------------------
-
## Building
The very first step in building the OpenJDK is making sure the system itself
@@ -147,7 +144,7 @@ Building the OpenJDK is now done with running a `configure` script which will
try and find and verify you have everything you need, followed by running
`make`, e.g.
-> **`bash ./configure`**
+> **`bash ./configure`** \
> **`make all`**
Where possible the `configure` script will attempt to locate the various
@@ -160,9 +157,8 @@ system due to missing software packages.
**NOTE:** The `configure` script file does not have execute permissions and
will need to be explicitly run with `bash`, see the source guidelines.
-*****
+-------------------------------------------------------------------------------
-
### System Setup
Before even attempting to use a system to build the OpenJDK there are some very
@@ -173,15 +169,15 @@ basic system setups needed. For all systems:
* Install a Bootstrap JDK. All OpenJDK builds require access to a previously
- released JDK called the _bootstrap JDK_ or _boot JDK._ The general rule is
+ released JDK called the *bootstrap JDK* or *boot JDK.* The general rule is
that the bootstrap JDK must be an instance of the previous major release of
the JDK. In addition, there may be a requirement to use a release at or
beyond a particular update level.
- **_Building JDK 8 requires use of a version of JDK 7 this is at Update 7
+ ***Building JDK 8 requires use of a version of JDK 7 that is at Update 7
or newer. JDK 8 developers should not use JDK 8 as the boot JDK, to ensure
that JDK 8 dependencies are not introduced into the parts of the system
- that are built with JDK 7._**
+ that are built with JDK 7.***
The JDK 7 binaries can be downloaded from Oracle's [JDK 7 download
site](http://www.oracle.com/technetwork/java/javase/downloads/index.html).
@@ -219,7 +215,6 @@ And for specific systems:
install the "Command line tools" found under the preferences pane
"Downloads"
-
#### Linux
With Linux, try and favor the system packages over building your own or getting
@@ -233,16 +228,14 @@ refer to the JDK installed on your Linux system. You will need to unset
you are getting from the default system settings make sense for building the
OpenJDK.
-
#### Solaris
-
##### Studio Compilers
-At a minimum, the [Studio 12 Update 1 Compilers](http://www.oracle.com/
-technetwork/server-storage/solarisstudio/downloads/index.htm) (containing
-version 5.10 of the C and C++ compilers) is required, including specific
-patches.
+At a minimum, the [Studio 12 Update 1
+Compilers](http://www.oracle.com/technetwork/server-storage/solarisstudio/downloads/index.htm)
+(containing version 5.10 of the C and C++ compilers) is required, including
+specific patches.
The Solaris SPARC patch list is:
@@ -273,11 +266,11 @@ The Solaris X86 patch list is:
Place the `bin` directory in `PATH`.
The Oracle Solaris Studio Express compilers at: [Oracle Solaris Studio Express
-Download site](http://www.oracle.com/technetwork/server-storage/solarisstudio/
-downloads/index-jsp-142582.html) are also an option, although these compilers
-have not been extensively used yet.
+Download
+site](http://www.oracle.com/technetwork/server-storage/solarisstudio/downloads/index-jsp-142582.html)
+are also an option, although these compilers have not been extensively used
+yet.
-
#### Windows
##### Windows Unix Toolkit
@@ -295,7 +288,6 @@ backslashes to forward slashes, escaping backslashes with additional
backslashes and translating the path names to their ["8.3"
version](http://en.wikipedia.org/wiki/8.3_filename).
-
###### CYGWIN
CYGWIN is an open source, Linux-like environment which tries to emulate a
@@ -323,80 +315,24 @@ By default CYGWIN doesn't install all the tools required for building the
OpenJDK. Along with the default installation, you need to install the following
tools.
->
-
-
-
Binary Name
-
Category
-
Package
-
Description
-
-
-
-
-
ar.exe
-
Devel
-
binutils
-
The GNU assembler, linker and binary utilities
-
-
-
make.exe
-
Devel
-
make
-
The GNU version of the 'make' utility built for CYGWIN
-
-
-
m4.exe
-
Interpreters
-
m4
-
GNU implementation of the traditional Unix macro processor
-
-
-
cpio.exe
-
Utils
-
cpio
-
A program to manage archives of files
-
-
-
gawk.exe
-
Utils
-
awk
-
Pattern-directed scanning and processing language
-
-
-
file.exe
-
Utils
-
file
-
Determines file type using 'magic' numbers
-
-
-
zip.exe
-
Archive
-
zip
-
Package and compress (archive) files
-
-
-
unzip.exe
-
Archive
-
unzip
-
Extract compressed files in a ZIP archive
-
-
-
free.exe
-
System
-
procps
-
Display amount of free and used memory in the system
-
-
-
+ Binary Name Category Package Description
+ ------------- -------------- ---------- ------------------------------------------------------------
+ ar.exe Devel binutils The GNU assembler, linker and binary utilities
+ make.exe Devel make The GNU version of the 'make' utility built for CYGWIN
+ m4.exe Interpreters m4 GNU implementation of the traditional Unix macro processor
+ cpio.exe Utils cpio A program to manage archives of files
+ gawk.exe Utils awk Pattern-directed scanning and processing language
+ file.exe Utils file Determines file type using 'magic' numbers
+ zip.exe Archive zip Package and compress (archive) files
+ unzip.exe Archive unzip Extract compressed files in a ZIP archive
+ free.exe System procps Display amount of free and used memory in the system
Note that the CYGWIN software can conflict with other non-CYGWIN software on
-your Windows system. CYGWIN provides a [FAQ](http://cygwin.com/faq/
-faq.using.html) for known issues and problems, of particular interest is the
-section on [BLODA (applications that interfere with
-CYGWIN)](http://cygwin.com/faq/faq.using.html#faq.using.bloda).
+your Windows system. CYGWIN provides a
+[FAQ](http://cygwin.com/faq/faq.using.html) for known issues and problems,
+of particular interest is the section on [BLODA (applications that interfere
+with CYGWIN)](http://cygwin.com/faq/faq.using.html#faq.using.bloda).
-
###### MinGW/MSYS
MinGW ("Minimalist GNU for Windows") is a collection of free Windows specific
@@ -404,20 +340,20 @@ header files and import libraries combined with GNU toolsets that allow one to
produce native Windows programs that do not rely on any 3rd-party C runtime
DLLs. MSYS is a supplement to MinGW which allows building applications and
programs which rely on traditional UNIX tools to be present. Among others this
-includes tools like `bash` and `make`. See [MinGW/MSYS](http://www.mingw.org/
-wiki/MSYS) for more information.
+includes tools like `bash` and `make`. See
+[MinGW/MSYS](http://www.mingw.org/wiki/MSYS) for more information.
Like Cygwin, MinGW/MSYS can handle different types of path formats. They are
-internally converted to paths with forward slashes and drive letters
-`:` replaced by a virtual directory `/`. Additionally, MSYS
-automatically detects binaries compiled for the MSYS environment and feeds them
-with the internal, Unix-style path names. If native Windows applications are
-called from within MSYS programs their path arguments are automatically
-converted back to Windows style path names with drive letters and backslashes
-as path separators. This may cause problems for Windows applications which use
-forward slashes as parameter separator (e.g. `cl /nologo /I`) because MSYS may
-wrongly [replace such parameters by drive letters](http://mingw.org/wiki/
-Posix_path_conversion).
+internally converted to paths with forward slashes and drive letters `:`
+replaced by a virtual directory `/`. Additionally, MSYS automatically
+detects binaries compiled for the MSYS environment and feeds them with the
+internal, Unix-style path names. If native Windows applications are called from
+within MSYS programs their path arguments are automatically converted back to
+Windows style path names with drive letters and backslashes as path separators.
+This may cause problems for Windows applications which use forward slashes as
+parameter separator (e.g. `cl /nologo /I`) because MSYS may wrongly [replace
+such parameters by drive
+letters](http://mingw.org/wiki/Posix_path_conversion).
In addition to the tools which will be installed by default, you have to
manually install the `msys-zip` and `msys-unzip` packages. This can be easily
@@ -426,7 +362,6 @@ done with the MinGW command line installer:
mingw-get.exe install msys-zip
mingw-get.exe install msys-unzip
-
##### Visual Studio 2010 Compilers
The 32-bit and 64-bit OpenJDK Windows build requires Microsoft Visual Studio
@@ -445,14 +380,12 @@ and not `C:/temp`. `C:\temp` is just an example, it is assumed that this area
is private to the user, so by default after installs you should see a unique
user path in these variables.
-
#### Mac OS X
Make sure you get the right XCode version.
-*****
+-------------------------------------------------------------------------------
-
### Configure
The basic invocation of the `configure` script looks like:
@@ -472,14 +405,12 @@ happens, read more below in [the `configure` options](#configureoptions).
Some examples:
-> **Windows 32bit build with freetype specified:**
-> `bash ./configure --with-freetype=/cygdrive/c/freetype-i586 --with-target-
-bits=32`
+> **Windows 32bit build with freetype specified:** \
+> `bash ./configure --with-freetype=/cygdrive/c/freetype-i586 --with-target-bits=32`
-> **Debug 64bit Build:**
+> **Debug 64bit Build:** \
> `bash ./configure --enable-debug --with-target-bits=64`
-
#### Configure Options
Complete details on all the OpenJDK `configure` options can be seen with:
@@ -491,12 +422,13 @@ number of different configurations, e.g. debug, release, 32, 64, etc.
Some of the more commonly used `configure` options are:
-> **`--enable-debug`**
-> set the debug level to fastdebug (this is a shorthand for `--with-debug-
- level=fastdebug`)
+> **`--enable-debug`** \
+> set the debug level to fastdebug (this is a shorthand for
+> `--with-debug-level=fastdebug`)
-> **`--with-alsa=`**_path_
+
+> **`--with-alsa=`**_path_ \
> select the location of the Advanced Linux Sound Architecture (ALSA)
> Version 0.9.1 or newer of the ALSA files are required for building the
@@ -505,29 +437,31 @@ Some of the more commonly used `configure` options are:
and use the package provided by the particular version of Linux that you are
using.
-> **`--with-boot-jdk=`**_path_
+> **`--with-boot-jdk=`**_path_ \
> select the [Bootstrap JDK](#bootjdk)
-> **`--with-boot-jdk-jvmargs=`**"_args_"
+> **`--with-boot-jdk-jvmargs=`**"_args_" \
> provide the JVM options to be used to run the [Bootstrap JDK](#bootjdk)
-> **`--with-cacerts=`**_path_
+> **`--with-cacerts=`**_path_ \
> select the path to the cacerts file.
-> See [Certificate Authority on Wikipedia](http://en.wikipedia.org/wiki/
- Certificate_Authority) for a better understanding of the Certificate
- Authority (CA). A certificates file named "cacerts" represents a system-wide
- keystore with CA certificates. In JDK and JRE binary bundles, the "cacerts"
- file contains root CA certificates from several public CAs (e.g., VeriSign,
- Thawte, and Baltimore). The source contain a cacerts file without CA root
- certificates. Formal JDK builders will need to secure permission from each
- public CA and include the certificates into their own custom cacerts file.
- Failure to provide a populated cacerts file will result in verification
- errors of a certificate chain during runtime. By default an empty cacerts
- file is provided and that should be fine for most JDK developers.
+> See [Certificate Authority on
+ Wikipedia](http://en.wikipedia.org/wiki/Certificate_Authority) for a
+ better understanding of the Certificate Authority (CA). A certificates file
+ named "cacerts" represents a system-wide keystore with CA certificates. In
+ JDK and JRE binary bundles, the "cacerts" file contains root CA certificates
+ from several public CAs (e.g., VeriSign, Thawte, and Baltimore). The source
+ contain a cacerts file without CA root certificates. Formal JDK builders will
+ need to secure permission from each public CA and include the certificates
+ into their own custom cacerts file. Failure to provide a populated cacerts
+ file will result in verification errors of a certificate chain during
+ runtime. By default an empty cacerts file is provided and that should be fine
+ for most JDK developers.
-> **`--with-cups=`**_path_
+
+> **`--with-cups=`**_path_ \
> select the CUPS install location
> The Common UNIX Printing System (CUPS) Headers are required for building the
@@ -538,17 +472,18 @@ Some of the more commonly used `configure` options are:
> The CUPS header files can always be downloaded from
[www.cups.org](http://www.cups.org).
-> **`--with-cups-include=`**_path_
+> **`--with-cups-include=`**_path_ \
> select the CUPS include directory location
-> **`--with-debug-level=`**_level_
+> **`--with-debug-level=`**_level_ \
> select the debug information level of release, fastdebug, or slowdebug
-> **`--with-dev-kit=`**_path_
+> **`--with-dev-kit=`**_path_ \
> select location of the compiler install or developer install location
-> **`--with-freetype=`**_path_
+
+> **`--with-freetype=`**_path_ \
> select the freetype files to use.
> Expecting the freetype libraries under `lib/` and the headers under
@@ -570,32 +505,33 @@ Some of the more commonly used `configure` options are:
[SourceForge FreeType2 Home Page](http://freetype.sourceforge.net/freetype2)
for more information.
-> **`--with-import-hotspot=`**_path_
+> **`--with-import-hotspot=`**_path_ \
> select the location to find hotspot binaries from a previous build to avoid
building hotspot
-> **`--with-target-bits=`**_arg_
+> **`--with-target-bits=`**_arg_ \
> select 32 or 64 bit build
-> **`--with-jvm-variants=`**_variants_
+> **`--with-jvm-variants=`**_variants_ \
> select the JVM variants to build from, comma separated list that can
include: server, client, kernel, zero and zeroshark
-> **`--with-memory-size=`**_size_
+> **`--with-memory-size=`**_size_ \
> select the RAM size that GNU make will think this system has
-> **`--with-msvcr-dll=`**_path_
+> **`--with-msvcr-dll=`**_path_ \
> select the `msvcr100.dll` file to include in the Windows builds (C/C++
runtime library for Visual Studio).
> This is usually picked up automatically from the redist directories of
Visual Studio 2010.
-> **`--with-num-cores=`**_cores_
+> **`--with-num-cores=`**_cores_ \
> select the number of cores to use (processor count or CPU count)
-> **`--with-x=`**_path_
+
+> **`--with-x=`**_path_ \
> select the location of the X11 and xrender files.
> The XRender Extension Headers are required for building the OpenJDK on
@@ -607,9 +543,8 @@ Some of the more commonly used `configure` options are:
installed in `/usr/X11/include/X11/extensions/Xrender.h` or
`/usr/openwin/share/include/X11/extensions/Xrender.h`
-*****
+-------------------------------------------------------------------------------
-
### Make
The basic invocation of the `make` utility looks like:
@@ -622,34 +557,33 @@ more information on the available targets.
There are some of the make targets that are of general interest:
-> _empty_
+> _empty_ \
> build everything but no images
-> **`all`**
+> **`all`** \
> build everything including images
-> **`all-conf`**
+> **`all-conf`** \
> build all configurations
-> **`images`**
+> **`images`** \
> create complete j2sdk and j2re images
-> **`install`**
+> **`install`** \
> install the generated images locally, typically in `/usr/local`
-> **`clean`**
+> **`clean`** \
> remove all files generated by make, but not those generated by `configure`
-> **`dist-clean`**
+> **`dist-clean`** \
> remove all files generated by both and `configure` (basically killing the
configuration)
-> **`help`**
+> **`help`** \
> give some help on using `make`, including some interesting make targets
-*****
+-------------------------------------------------------------------------------
-
## Testing
When the build is completed, you should see the generated binaries and
@@ -662,35 +596,33 @@ repositories can be run with the command:
> **``cd test && make PRODUCT_HOME=`pwd`/../build/*/images/j2sdk-image all``**
-*****
+-------------------------------------------------------------------------------
-
## Appendix A: Hints and Tips
-
### FAQ
**Q:** The `generated-configure.sh` file looks horrible! How are you going to
-edit it?
+edit it? \
**A:** The `generated-configure.sh` file is generated (think "compiled") by the
autoconf tools. The source code is in `configure.ac` and various .m4 files in
common/autoconf, which are much more readable.
-**Q:** Why is the `generated-configure.sh` file checked in, if it is
-generated?
+**Q:** Why is the `generated-configure.sh` file checked in, if it is
+generated? \
**A:** If it was not generated, every user would need to have the autoconf
tools installed, and re-generate the `configure` file as the first step. Our
goal is to minimize the work needed to be done by the user to start building
OpenJDK, and to minimize the number of external dependencies required.
**Q:** Do you require a specific version of autoconf for regenerating
-`generated-configure.sh`?
+`generated-configure.sh`? \
**A:** Yes, version 2.69 is required and should be easy enough to acquire on all
supported operating systems. The reason for this is to avoid large spurious
changes in `generated-configure.sh`.
**Q:** How do you regenerate `generated-configure.sh` after making changes to
-the input files?
+the input files? \
**A:** Regenerating `generated-configure.sh` should always be done using the
script `common/autoconf/autogen.sh` to ensure that the correct files get
updated. This script should also be run after mercurial tries to merge
@@ -698,7 +630,7 @@ updated. This script should also be run after mercurial tries to merge
be correct.
**Q:** What are the files in `common/makefiles/support/*` for? They look like
-gibberish.
+gibberish. \
**A:** They are a somewhat ugly hack to compensate for command line length
limitations on certain platforms (Windows, Solaris). Due to a combination of
limitations in make and the shell, command lines containing too many files will
@@ -708,21 +640,21 @@ not proud of it, but it does fix the problem. If you have any better
suggestions, we're all ears! :-)
**Q:** I want to see the output of the commands that make runs, like in the old
-build. How do I do that?
+build. How do I do that? \
**A:** You specify the `LOG` variable to make. There are several log levels:
* **`warn`** -- Default and very quiet.
* **`info`** -- Shows more progress information than warn.
* **`debug`** -- Echos all command lines and prints all macro calls for
compilation definitions.
- * **`trace`** -- Echos all $(shell) command lines as well.
+ * **`trace`** -- Echos all \$(shell) command lines as well.
-**Q:** When do I have to re-run `configure`?
+**Q:** When do I have to re-run `configure`? \
**A:** Normally you will run `configure` only once for creating a
configuration. You need to re-run configuration only if you want to change any
configuration options, or if you pull down changes to the `configure` script.
-**Q:** I have added a new source file. Do I need to modify the makefiles?
+**Q:** I have added a new source file. Do I need to modify the makefiles? \
**A:** Normally, no. If you want to create e.g. a new native library, you will
need to modify the makefiles. But for normal file additions or removals, no
changes are needed. There are certan exceptions for some native libraries where
@@ -731,20 +663,21 @@ for other libraries. In these cases it was simply easier to create include
lists rather than excludes.
**Q:** When I run `configure --help`, I see many strange options, like
-`--dvidir`. What is this?
+`--dvidir`. What is this? \
**A:** Configure provides a slew of options by default, to all projects that
use autoconf. Most of them are not used in OpenJDK, so you can safely ignore
them. To list only OpenJDK specific features, use `configure --help=short`
instead.
-**Q:** `configure` provides OpenJDK-specific features such as `--with-
-builddeps-server` that are not described in this document. What about those?
+**Q:** `configure` provides OpenJDK-specific features such as
+`--with-builddeps-server` that are not described in this document. What about
+those? \
**A:** Try them out if you like! But be aware that most of these are
experimental features. Many of them don't do anything at all at the moment; the
option is just a placeholder. Others depend on pieces of code or infrastructure
that is currently not ready for prime time.
-**Q:** How will you make sure you don't break anything?
+**Q:** How will you make sure you don't break anything? \
**A:** We have a script that compares the result of the new build system with
the result of the old. For most part, we aim for (and achieve) byte-by-byte
identical output. There are however technical issues with e.g. native binaries,
@@ -754,7 +687,7 @@ table and file size). Note that we still don't have 100% equivalence, but we're
close.
**Q:** I noticed this thing X in the build that looks very broken by design.
-Why don't you fix it?
+Why don't you fix it? \
**A:** Our goal is to produce a build output that is as close as technically
possible to the old build output. If things were weird in the old build, they
will be weird in the new build. Often, things were weird before due to
@@ -763,14 +696,14 @@ The plan is to attack these things at a later stage, after the new build system
is established.
**Q:** The code in the new build system is not that well-structured. Will you
-fix this?
+fix this? \
**A:** Yes! The new build system has grown bit by bit as we converted the old
system. When all of the old build system is converted, we can take a step back
and clean up the structure of the new build system. Some of this we plan to do
before replacing the old build system and some will need to wait until after.
**Q:** Is anything able to use the results of the new build's default make
-target?
+target? \
**A:** Yes, this is the minimal (or roughly minimal) set of compiled output
needed for a developer to actually execute the newly built JDK. The idea is
that in an incremental development fashion, when doing a normal make, you
@@ -782,12 +715,11 @@ it's still unnecessary. We're targeting sub-second incremental rebuilds! ;-)
(Or, well, at least single-digit seconds...)
**Q:** I usually set a specific environment variable when building, but I can't
-find the equivalent in the new build. What should I do?
+find the equivalent in the new build. What should I do? \
**A:** It might very well be that we have neglected to add support for an
option that was actually used from outside the build system. Email us and we
will add support for it!
-
### Build Performance Tips
Building OpenJDK requires a lot of horsepower. Some of the build tools can be
@@ -858,7 +790,6 @@ and override with fast if you're impatient, you should call `configure` with
`--with-num-cores=2`, making 2 the default. If you want to run with more cores,
run `make JOBS=8`
-
### Troubleshooting
#### Solving build problems
@@ -886,7 +817,7 @@ difficulties that `configure` had in finding things.
Some of the more common problems with builds are briefly described below, with
suggestions for remedies.
- * **Corrupted Bundles on Windows:**
+ * **Corrupted Bundles on Windows:** \
Some virus scanning software has been known to corrupt the downloading of
zip bundles. It may be necessary to disable the 'on access' or 'real time'
virus scanning features to prevent this corruption. This type of 'real time'
@@ -894,7 +825,7 @@ suggestions for remedies.
Temporarily disabling the feature, or excluding the build output directory
may be necessary to get correct and faster builds.
- * **Slow Builds:**
+ * **Slow Builds:** \
If your build machine seems to be overloaded from too many simultaneous C++
compiles, try setting the `JOBS=1` on the `make` command line. Then try
increasing the count slowly to an acceptable level for your system. Also:
@@ -909,10 +840,10 @@ suggestions for remedies.
Faster compiles are possible using a tool called
[ccache](http://ccache.samba.org/).
- * **File time issues:**
+ * **File time issues:** \
If you see warnings that refer to file time stamps, e.g.
- > _Warning message:_ ` File 'xxx' has modification time in the future.`
+ > _Warning message:_ ` File 'xxx' has modification time in the future.` \
> _Warning message:_ ` Clock skew detected. Your build may be incomplete.`
These warnings can occur when the clock on the build machine is out of sync
@@ -925,7 +856,7 @@ suggestions for remedies.
"`gmake clobber`" or delete the directory containing the build output, and
restart the build from the beginning.
- * **Error message: `Trouble writing out table to disk`**
+ * **Error message: `Trouble writing out table to disk`** \
Increase the amount of swap space on your build machine. This could be
caused by overloading the system and it may be necessary to use:
@@ -933,7 +864,7 @@ suggestions for remedies.
to reduce the load on the system.
- * **Error Message: `libstdc++ not found`:**
+ * **Error Message: `libstdc++ not found`:** \
This is caused by a missing libstdc++.a library. This is installed as part
of a specific package (e.g. libstdc++.so.devel.386). By default some 64-bit
Linux versions (e.g. Fedora) only install the 64-bit version of the
@@ -941,7 +872,7 @@ suggestions for remedies.
the C++ runtime libraries to allow for maximum portability of the built
images.
- * **Linux Error Message: `cannot restore segment prot after reloc`**
+ * **Linux Error Message: `cannot restore segment prot after reloc`** \
This is probably an issue with SELinux (See [SELinux on
Wikipedia](http://en.wikipedia.org/wiki/SELinux)). Parts of the VM is built
without the `-fPIC` for performance reasons.
@@ -956,31 +887,30 @@ suggestions for remedies.
Alternatively, instead of completely disabling it you could disable just
this one check.
- 1. Select System->Administration->SELinux Management
+ 1. Select System->Administration->SELinux Management
2. In the SELinux Management Tool which appears, select "Boolean" from the
menu on the left
3. Expand the "Memory Protection" group
4. Check the first item, labeled "Allow all unconfined executables to use
libraries requiring text relocation ..."
- * **Windows Error Messages:**
- `*** fatal error - couldn't allocate heap, ... `
- `rm fails with "Directory not empty"`
- `unzip fails with "cannot create ... Permission denied"`
+ * **Windows Error Messages:** \
+ `*** fatal error - couldn't allocate heap, ... ` \
+ `rm fails with "Directory not empty"` \
+ `unzip fails with "cannot create ... Permission denied"` \
`unzip fails with "cannot create ... Error 50"`
The CYGWIN software can conflict with other non-CYGWIN software. See the
CYGWIN FAQ section on [BLODA (applications that interfere with
CYGWIN)](http://cygwin.com/faq/faq.using.html#faq.using.bloda).
- * **Windows Error Message: `spawn failed`**
+ * **Windows Error Message: `spawn failed`** \
Try rebooting the system, or there could be some kind of issue with the disk
or disk partition being used. Sometimes it comes with a "Permission Denied"
message.
-*****
+-------------------------------------------------------------------------------
-
## Appendix B: GNU make
The Makefiles in the OpenJDK are only valid when used with the GNU version of
@@ -998,11 +928,10 @@ about using GNU make:
* **Mac OS X:** The XCode "command line tools" must be installed on your Mac.
Information on GNU make, and access to ftp download sites, are available on the
-[GNU make web site ](http://www.gnu.org/software/make/make.html). The latest
+[GNU make web site](http://www.gnu.org/software/make/make.html). The latest
source to GNU make is available at
[ftp.gnu.org/pub/gnu/make/](http://ftp.gnu.org/pub/gnu/make/).
-
### Building GNU make
First step is to get the GNU make 3.81 or newer source from
@@ -1012,9 +941,8 @@ little different depending on the OS but is basically done with:
bash ./configure
make
-*****
+-------------------------------------------------------------------------------
-
## Appendix C: Build Environments
### Minimum Build Environments
@@ -1062,79 +990,19 @@ problem. Similarly, compilation problems related to changes to the
newer, or unreleased OS versions. Please report these types of problems as bugs
so that they can be dealt with accordingly.
->
-
-
-
Base OS and Architecture
-
OS
-
C/C++ Compiler
-
Bootstrap JDK
-
Processors
-
RAM Minimum
-
DISK Needs
-
-
-
-
-
Linux X86 (32-bit) and X64 (64-bit)
-
Fedora 9
-
gcc 4.3
-
JDK 7u7
-
2 or more
-
1 GB
-
6 GB
-
-
-
Solaris SPARC (32-bit) and SPARCV9 (64-bit)
-
Solaris 10 Update 6
-
Studio 12 Update 1 + patches
-
JDK 7u7
-
4 or more
-
4 GB
-
8 GB
-
-
-
Solaris X86 (32-bit) and X64 (64-bit)
-
Solaris 10 Update 6
-
Studio 12 Update 1 + patches
-
JDK 7u7
-
4 or more
-
4 GB
-
8 GB
-
-
-
Windows X86 (32-bit)
-
Windows XP
-
Microsoft Visual Studio C++ 2010 Professional Edition
-
JDK 7u7
-
2 or more
-
2 GB
-
6 GB
-
-
-
Windows X64 (64-bit)
-
Windows Server 2003 - Enterprise x64 Edition
-
Microsoft Visual Studio C++ 2010 Professional Edition
-
JDK 7u7
-
2 or more
-
2 GB
-
6 GB
-
-
-
Mac OS X X64 (64-bit)
-
Mac OS X 10.7 "Lion"
-
XCode 4.5.2 or newer
-
JDK 7u7
-
2 or more
-
4 GB
-
6 GB
-
-
-
-
-*****
-
-
+Bootstrap JDK: JDK 7u7
+
+ Base OS and Architecture OS C/C++ Compiler Processors RAM Minimum DISK Needs
+ --------------------------------------------- ---------------------------------------------- ------------------------------------------------------- ------------ ------------- ------------
+ Linux X86 (32-bit) and X64 (64-bit) Fedora 9 gcc 4.3 2 or more 1 GB 6 GB
+ Solaris SPARC (32-bit) and SPARCV9 (64-bit) Solaris 10 Update 6 Studio 12 Update 1 + patches 4 or more 4 GB 8 GB
+ Solaris X86 (32-bit) and X64 (64-bit) Solaris 10 Update 6 Studio 12 Update 1 + patches 4 or more 4 GB 8 GB
+ Windows X86 (32-bit) Windows XP Microsoft Visual Studio C++ 2010 Professional Edition 2 or more 2 GB 6 GB
+ Windows X64 (64-bit) Windows Server 2003 - Enterprise x64 Edition Microsoft Visual Studio C++ 2010 Professional Edition 2 or more 2 GB 6 GB
+ Mac OS X X64 (64-bit) Mac OS X 10.7 "Lion" XCode 4.5.2 or newer 2 or more 4 GB 6 GB
+
+-------------------------------------------------------------------------------
+
### Specific Developer Build Environments
We won't be listing all the possible environments, but we will try to provide
@@ -1259,7 +1127,7 @@ In addition, it is necessary to set a few environment variables for the build:
export LANG=C
export PATH="/opt/SunStudioExpress/bin:${PATH}"
-*****
+-------------------------------------------------------------------------------
End of the OpenJDK build README document.
diff --git a/hotspot/README b/hotspot/README
deleted file mode 100644
index 19afb261fc3..00000000000
--- a/hotspot/README
+++ /dev/null
@@ -1,14 +0,0 @@
-README:
- This file should be located at the top of the hotspot Mercurial repository.
-
- See http://openjdk.java.net/ for more information about the OpenJDK.
-
- See ../README-builds.html for complete details on build machine requirements.
-
-Simple Build Instructions:
-
- cd make && gnumake
-
- The files that will be imported into the jdk build will be in the "build"
- directory.
-
diff --git a/hotspot/src/share/vm/c1/c1_LIRGenerator.cpp b/hotspot/src/share/vm/c1/c1_LIRGenerator.cpp
index b73e39a8528..8b4b8834035 100644
--- a/hotspot/src/share/vm/c1/c1_LIRGenerator.cpp
+++ b/hotspot/src/share/vm/c1/c1_LIRGenerator.cpp
@@ -79,6 +79,7 @@ void PhiResolverState::reset(int max_vregs) {
PhiResolver::PhiResolver(LIRGenerator* gen, int max_vregs)
: _gen(gen)
, _state(gen->resolver_state())
+ , _loop(NULL)
, _temp(LIR_OprFact::illegalOpr)
{
// reinitialize the shared state arrays
diff --git a/hotspot/src/share/vm/c1/c1_RangeCheckElimination.cpp b/hotspot/src/share/vm/c1/c1_RangeCheckElimination.cpp
index d17814e2af9..397b88a08fe 100644
--- a/hotspot/src/share/vm/c1/c1_RangeCheckElimination.cpp
+++ b/hotspot/src/share/vm/c1/c1_RangeCheckElimination.cpp
@@ -379,8 +379,11 @@ void RangeCheckEliminator::add_access_indexed_info(InstructionList &indices, int
aii->_max = idx;
aii->_list = new AccessIndexedList();
} else if (idx >= aii->_min && idx <= aii->_max) {
- remove_range_check(ai);
- return;
+ // Guard against underflow/overflow (see 'range_cond' check in RangeCheckEliminator::in_block_motion)
+ if (aii->_max < 0 || (aii->_max + min_jint) <= aii->_min) {
+ remove_range_check(ai);
+ return;
+ }
}
aii->_min = MIN2(aii->_min, idx);
aii->_max = MAX2(aii->_max, idx);
@@ -423,9 +426,9 @@ void RangeCheckEliminator::in_block_motion(BlockBegin *block, AccessIndexedList
}
}
} else {
- int last_integer = 0;
+ jint last_integer = 0;
Instruction *last_instruction = index;
- int base = 0;
+ jint base = 0;
ArithmeticOp *ao = index->as_ArithmeticOp();
while (ao != NULL && (ao->x()->as_Constant() || ao->y()->as_Constant()) && (ao->op() == Bytecodes::_iadd || ao->op() == Bytecodes::_isub)) {
@@ -437,12 +440,12 @@ void RangeCheckEliminator::in_block_motion(BlockBegin *block, AccessIndexedList
}
if (c) {
- int value = c->type()->as_IntConstant()->value();
+ jint value = c->type()->as_IntConstant()->value();
if (value != min_jint) {
if (ao->op() == Bytecodes::_isub) {
value = -value;
}
- base += value;
+ base = java_add(base, value);
last_integer = base;
last_instruction = other;
}
@@ -464,12 +467,12 @@ void RangeCheckEliminator::in_block_motion(BlockBegin *block, AccessIndexedList
assert(info != NULL, "Info must not be null");
// if idx < 0, max > 0, max + idx may fall between 0 and
- // length-1 and if min < 0, min + idx may overflow and be >=
+ // length-1 and if min < 0, min + idx may underflow/overflow and be >=
// 0. The predicate wouldn't trigger but some accesses could
// be with a negative index. This test guarantees that for the
// min and max value that are kept the predicate can't let
// some incorrect accesses happen.
- bool range_cond = (info->_max < 0 || info->_max + min_jint <= info->_min);
+ bool range_cond = (info->_max < 0 || (info->_max + min_jint) <= info->_min);
// Generate code only if more than 2 range checks can be eliminated because of that.
// 2 because at least 2 comparisons are done
@@ -809,7 +812,7 @@ void RangeCheckEliminator::process_access_indexed(BlockBegin *loop_header, Block
);
remove_range_check(ai);
- } else if (_optimistic && loop_header) {
+ } else if (false && _optimistic && loop_header) {
assert(ai->array(), "Array must not be null!");
assert(ai->index(), "Index must not be null!");
diff --git a/hotspot/src/share/vm/classfile/verifier.cpp b/hotspot/src/share/vm/classfile/verifier.cpp
index c653e2b5a9e..2dddd1fdedc 100644
--- a/hotspot/src/share/vm/classfile/verifier.cpp
+++ b/hotspot/src/share/vm/classfile/verifier.cpp
@@ -2081,11 +2081,12 @@ void ClassVerifier::verify_switch(
"low must be less than or equal to high in tableswitch");
return;
}
- keys = high - low + 1;
- if (keys < 0) {
+ int64_t keys64 = ((int64_t)high - low) + 1;
+ if (keys64 > 65535) { // Max code length
verify_error(ErrorContext::bad_code(bci), "too many keys in tableswitch");
return;
}
+ keys = (int)keys64;
delta = 1;
} else {
keys = (int)Bytes::get_Java_u4(aligned_bcp + jintSize);
diff --git a/hotspot/src/share/vm/code/nmethod.hpp b/hotspot/src/share/vm/code/nmethod.hpp
index 7c239703610..49d9b28d42e 100644
--- a/hotspot/src/share/vm/code/nmethod.hpp
+++ b/hotspot/src/share/vm/code/nmethod.hpp
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 1997, 2017, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 1997, 2023, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@@ -371,8 +371,8 @@ class nmethod : public CodeBlob {
// type info
bool is_nmethod() const { return true; }
- bool is_java_method() const { return !method()->is_native(); }
- bool is_native_method() const { return method()->is_native(); }
+ bool is_java_method() const { return _method != NULL && !method()->is_native(); }
+ bool is_native_method() const { return _method != NULL && method()->is_native(); }
bool is_osr_method() const { return _entry_bci != InvocationEntryBci; }
bool is_compiled_by_c1() const;
diff --git a/hotspot/src/share/vm/interpreter/bytecodes.cpp b/hotspot/src/share/vm/interpreter/bytecodes.cpp
index 5fce86681da..d06e5e499e5 100644
--- a/hotspot/src/share/vm/interpreter/bytecodes.cpp
+++ b/hotspot/src/share/vm/interpreter/bytecodes.cpp
@@ -114,12 +114,18 @@ int Bytecodes::special_length_at(Bytecodes::Code code, address bcp, address end)
if (end != NULL && aligned_bcp + 3*jintSize >= end) {
return -1; // don't read past end of code buffer
}
+ // Promote calculation to signed 64 bits to do range checks, used by the verifier.
jlong lo = (jint)Bytes::get_Java_u4(aligned_bcp + 1*jintSize);
jlong hi = (jint)Bytes::get_Java_u4(aligned_bcp + 2*jintSize);
jlong len = (aligned_bcp - bcp) + (3 + hi - lo + 1)*jintSize;
- // only return len if it can be represented as a positive int;
- // return -1 otherwise
- return (len > 0 && len == (int)len) ? len : -1;
+ // Only return len if it can be represented as a positive int and lo <= hi.
+ // The caller checks for bytecode stream overflow.
+ if (lo <= hi && len == (int)len) {
+ assert(len > 0, "must be");
+ return (int)len;
+ } else {
+ return -1;
+ }
}
case _lookupswitch: // fall through
@@ -131,9 +137,13 @@ int Bytecodes::special_length_at(Bytecodes::Code code, address bcp, address end)
}
jlong npairs = (jint)Bytes::get_Java_u4(aligned_bcp + jintSize);
jlong len = (aligned_bcp - bcp) + (2 + 2*npairs)*jintSize;
- // only return len if it can be represented as a positive int;
- // return -1 otherwise
- return (len > 0 && len == (int)len) ? len : -1;
+ // Only return len if it can be represented as a positive int and npairs >= 0.
+ if (npairs >= 0 && len == (int)len) {
+ assert(len > 0, "must be");
+ return (int)len;
+ } else {
+ return -1;
+ }
}
}
// Note: Length functions must return <=0 for invalid bytecodes.
diff --git a/hotspot/src/share/vm/opto/ifnode.cpp b/hotspot/src/share/vm/opto/ifnode.cpp
index 68f068d06e9..51579032e35 100644
--- a/hotspot/src/share/vm/opto/ifnode.cpp
+++ b/hotspot/src/share/vm/opto/ifnode.cpp
@@ -882,6 +882,46 @@ Node *IfNode::Ideal(PhaseGVN *phase, bool can_reshape) {
// then we are guaranteed to fail, so just start interpreting there.
// We 'expand' the top 3 range checks to include all post-dominating
// checks.
+ //
+ // Example:
+ // a[i+x] // (1) 1 < x < 6
+ // a[i+3] // (2)
+ // a[i+4] // (3)
+ // a[i+6] // max = max of all constants
+ // a[i+2]
+ // a[i+1] // min = min of all constants
+ //
+ // If x < 3:
+ // (1) a[i+x]: Leave unchanged
+ // (2) a[i+3]: Replace with a[i+max] = a[i+6]: i+x < i+3 <= i+6 -> (2) is covered
+ // (3) a[i+4]: Replace with a[i+min] = a[i+1]: i+1 < i+4 <= i+6 -> (3) and all following checks are covered
+ // Remove all other a[i+c] checks
+ //
+ // If x >= 3:
+ // (1) a[i+x]: Leave unchanged
+ // (2) a[i+3]: Replace with a[i+min] = a[i+1]: i+1 < i+3 <= i+x -> (2) is covered
+ // (3) a[i+4]: Replace with a[i+max] = a[i+6]: i+1 < i+4 <= i+6 -> (3) and all following checks are covered
+ // Remove all other a[i+c] checks
+ //
+ // We only need the top 2 range checks if x is the min or max of all constants.
+ //
+ // This, however, only works if the interval [i+min,i+max] is not larger than max_int (i.e. abs(max - min) < max_int):
+ // The theoretical max size of an array is max_int with:
+ // - Valid index space: [0,max_int-1]
+ // - Invalid index space: [max_int,-1] // max_int, min_int, min_int - 1 ..., -1
+ //
+ // The size of the consecutive valid index space is smaller than the size of the consecutive invalid index space.
+ // If we choose min and max in such a way that:
+ // - abs(max - min) < max_int
+ // - i+max and i+min are inside the valid index space
+ // then all indices [i+min,i+max] must be in the valid index space. Otherwise, the invalid index space must be
+ // smaller than the valid index space which is never the case for any array size.
+ //
+ // Choosing a smaller array size only makes the valid index space smaller and the invalid index space larger and
+ // the argument above still holds.
+ //
+ // Note that the same optimization with the same maximal accepted interval size can also be found in C1.
+ const jlong maximum_number_of_min_max_interval_indices = (jlong)max_jint;
// The top 3 range checks seen
const int NRC =3;
@@ -915,13 +955,18 @@ Node *IfNode::Ideal(PhaseGVN *phase, bool can_reshape) {
found_immediate_dominator = true;
break;
}
- // Gather expanded bounds
- off_lo = MIN2(off_lo,offset2);
- off_hi = MAX2(off_hi,offset2);
- // Record top NRC range checks
- prev_checks[nb_checks%NRC].ctl = prev_dom;
- prev_checks[nb_checks%NRC].off = offset2;
- nb_checks++;
+
+ // "x - y" -> must add one to the difference for number of elements in [x,y]
+ const jlong diff = (jlong)MIN2(offset2, off_lo) - (jlong)MAX2(offset2, off_hi);
+ if (ABS(diff) < maximum_number_of_min_max_interval_indices) {
+ // Gather expanded bounds
+ off_lo = MIN2(off_lo, offset2);
+ off_hi = MAX2(off_hi, offset2);
+ // Record top NRC range checks
+ prev_checks[nb_checks % NRC].ctl = prev_dom;
+ prev_checks[nb_checks % NRC].off = offset2;
+ nb_checks++;
+ }
}
}
prev_dom = dom;
diff --git a/hotspot/src/share/vm/opto/loopnode.cpp b/hotspot/src/share/vm/opto/loopnode.cpp
index 957929e5dfb..8103d5cd92c 100644
--- a/hotspot/src/share/vm/opto/loopnode.cpp
+++ b/hotspot/src/share/vm/opto/loopnode.cpp
@@ -260,6 +260,49 @@ void PhaseIdealLoop::set_subtree_ctrl( Node *n ) {
set_early_ctrl( n );
}
+void PhaseIdealLoop::insert_loop_limit_check(ProjNode* limit_check_proj, Node* cmp_limit, Node* bol) {
+ Node* new_predicate_proj = create_new_if_for_predicate(limit_check_proj, NULL,
+ Deoptimization::Reason_loop_limit_check);
+ Node* iff = new_predicate_proj->in(0);
+ assert(iff->Opcode() == Op_If, "bad graph shape");
+ Node* conv = iff->in(1);
+ assert(conv->Opcode() == Op_Conv2B, "bad graph shape");
+ Node* opaq = conv->in(1);
+ assert(opaq->Opcode() == Op_Opaque1, "bad graph shape");
+ cmp_limit = _igvn.register_new_node_with_optimizer(cmp_limit);
+ bol = _igvn.register_new_node_with_optimizer(bol);
+ set_subtree_ctrl(bol);
+ _igvn.replace_input_of(iff, 1, bol);
+
+#ifndef PRODUCT
+ // report that the loop predication has been actually performed
+ // for this loop
+ if (TraceLoopLimitCheck) {
+ tty->print_cr("Counted Loop Limit Check generated:");
+ debug_only( bol->dump(2); )
+ }
+#endif
+}
+
+static int check_stride_overflow(jlong final_correction, const TypeInt* limit_t) {
+ if (final_correction > 0) {
+ if (limit_t->_lo > (max_jint - final_correction)) {
+ return -1;
+ }
+ if (limit_t->_hi > (max_jint - final_correction)) {
+ return 1;
+ }
+ } else {
+ if (limit_t->_hi < (min_jint - final_correction)) {
+ return -1;
+ }
+ if (limit_t->_lo < (min_jint - final_correction)) {
+ return 1;
+ }
+ }
+ return 0;
+}
+
//------------------------------is_counted_loop--------------------------------
bool PhaseIdealLoop::is_counted_loop( Node *x, IdealLoopTree *loop ) {
PhaseGVN *gvn = &_igvn;
@@ -463,51 +506,256 @@ bool PhaseIdealLoop::is_counted_loop( Node *x, IdealLoopTree *loop ) {
assert(x->Opcode() == Op_Loop, "regular loops only");
C->print_method(PHASE_BEFORE_CLOOPS, 3);
- Node *hook = new (C) Node(6);
+ Node* adjusted_limit = limit;
if (LoopLimitCheck) {
// ===================================================
- // Generate loop limit check to avoid integer overflow
- // in cases like next (cyclic loops):
+ // We can only convert this loop to a counted loop if we can guarantee that the iv phi will never overflow at runtime.
+ // This is an implicit assumption taken by some loop optimizations. We therefore must ensure this property at all cost.
+ // At this point, we've already excluded some trivial cases where an overflow could have been proven statically.
+ // But even though we cannot prove that an overflow will *not* happen, we still want to speculatively convert this loop
+ // to a counted loop. This can be achieved by adding additional iv phi overflow checks before the loop. If they fail,
+ // we trap and resume execution before the loop without having executed any iteration of the loop, yet.
//
- // for (i=0; i <= max_jint; i++) {}
- // for (i=0; i < max_jint; i+=2) {}
+ // These additional iv phi overflow checks can be inserted as Loop Limit Check Predicates above the Loop Limit Check
+ // Parse Predicate which captures a JVM state just before the entry of the loop. If there is no such Parse Predicate,
+ // we cannot generate a Loop Limit Check Predicate and thus cannot speculatively convert the loop to a counted loop.
//
+ // In the following, we only focus on int loops with stride > 0 to keep things simple. The argumentation and proof
+ // for stride < 0 are analogous. For long loops, we would replace max_int with max_long.
//
- // Limit check predicate depends on the loop test:
//
- // for(;i != limit; i++) --> limit <= (max_jint)
- // for(;i < limit; i+=stride) --> limit <= (max_jint - stride + 1)
- // for(;i <= limit; i+=stride) --> limit <= (max_jint - stride )
+ // The loop to be converted does not always need to have the often used shape:
//
+ // i = init
+ // i = init loop:
+ // do { ...
+ // // ... equivalent i+=stride
+ // i+=stride <==> if (i < limit)
+ // } while (i < limit); goto loop
+ // exit:
+ // ...
+ //
+ // where the loop exit check uses the post-incremented iv phi and a '<'-operator.
+ //
+ // We could also have '<='-operator (or '>='-operator for negative strides) or use the pre-incremented iv phi value
+ // in the loop exit check:
+ //
+ // i = init
+ // loop:
+ // ...
+ // if (i <= limit)
+ // i+=stride
+ // goto loop
+ // exit:
+ // ...
+ //
+ // Let's define the following terms:
+ // - iv_pre_i: The pre-incremented iv phi before the i-th iteration.
+ // - iv_post_i: The post-incremented iv phi after the i-th iteration.
+ //
+ // The iv_pre_i and iv_post_i have the following relation:
+ // iv_pre_i + stride = iv_post_i
+ //
+ // When converting a loop to a counted loop, we want to have a canonicalized loop exit check of the form:
+ // iv_post_i < adjusted_limit
+ //
+ // If that is not the case, we need to canonicalize the loop exit check by using different values for adjusted_limit:
+ // (LE1) iv_post_i < limit: Already canonicalized. We can directly use limit as adjusted_limit.
+ // -> adjusted_limit = limit.
+ // (LE2) iv_post_i <= limit:
+ // iv_post_i < limit + 1
+ // -> adjusted_limit = limit + 1
+ // (LE3) iv_pre_i < limit:
+ // iv_pre_i + stride < limit + stride
+ // iv_post_i < limit + stride
+ // -> adjusted_limit = limit + stride
+ // (LE4) iv_pre_i <= limit:
+ // iv_pre_i < limit + 1
+ // iv_pre_i + stride < limit + stride + 1
+ // iv_post_i < limit + stride + 1
+ // -> adjusted_limit = limit + stride + 1
+ //
+ // Note that:
+ // (AL) limit <= adjusted_limit.
+ //
+ // The following loop invariant has to hold for counted loops with n iterations (i.e. loop exit check true after n-th
+ // loop iteration) and a canonicalized loop exit check to guarantee that no iv_post_i over- or underflows:
+ // (INV) For i = 1..n, min_int <= iv_post_i <= max_int
+ //
+ // To prove (INV), we require the following two conditions/assumptions:
+ // (i): adjusted_limit - 1 + stride <= max_int
+ // (ii): init < limit
+ //
+ // If we can prove (INV), we know that there can be no over- or underflow of any iv phi value. We prove (INV) by
+ // induction by assuming (i) and (ii).
+ //
+ // Proof by Induction
+ // ------------------
+ // > Base case (i = 1): We show that (INV) holds after the first iteration:
+ // min_int <= iv_post_1 = init + stride <= max_int
+ // Proof:
+ // First, we note that (ii) implies
+ // (iii) init <= limit - 1
+ // max_int >= adjusted_limit - 1 + stride [using (i)]
+ // >= limit - 1 + stride [using (AL)]
+ // >= init + stride [using (iii)]
+ // >= min_int [using stride > 0, no underflow]
+ // Thus, no overflow happens after the first iteration and (INV) holds for i = 1.
+ //
+ // Note that to prove the base case we need (i) and (ii).
+ //
+ // > Induction Hypothesis (i = j, j > 1): Assume that (INV) holds after the j-th iteration:
+ // min_int <= iv_post_j <= max_int
+ // > Step case (i = j + 1): We show that (INV) also holds after the j+1-th iteration:
+ // min_int <= iv_post_{j+1} = iv_post_j + stride <= max_int
+ // Proof:
+ // If iv_post_j >= adjusted_limit:
+ // We exit the loop after the j-th iteration, and we don't execute the j+1-th iteration anymore. Thus, there is
+ // also no iv_post_{j+1}. Since (INV) holds for iv_post_j, there is nothing left to prove.
+ // If iv_post_j < adjusted_limit:
+ // First, we note that:
+ // (iv) iv_post_j <= adjusted_limit - 1
+ // max_int >= adjusted_limit - 1 + stride [using (i)]
+ // >= iv_post_j + stride [using (iv)]
+ // >= min_int [using stride > 0, no underflow]
+ //
+ // Note that to prove the step case we only need (i).
+ //
+ // Thus, by assuming (i) and (ii), we proved (INV).
+ //
+ //
+ // It is therefore enough to add the following two Loop Limit Check Predicates to check assumptions (i) and (ii):
+ //
+ // (1) Loop Limit Check Predicate for (i):
+ // Using (i): adjusted_limit - 1 + stride <= max_int
+ //
+ // This condition is now restated to use limit instead of adjusted_limit:
+ //
+ // To prevent an overflow of adjusted_limit -1 + stride itself, we rewrite this check to
+ // max_int - stride + 1 >= adjusted_limit
+ // We can merge the two constants into
+ // canonicalized_correction = stride - 1
+ // which gives us
+ // max_int - canonicalized_correction >= adjusted_limit
+ //
+ // To directly use limit instead of adjusted_limit in the predicate condition, we split adjusted_limit into:
+ // adjusted_limit = limit + limit_correction
+ // Since stride > 0 and limit_correction <= stride + 1, we can restate this with no over- or underflow into:
+ // max_int - canonicalized_correction - limit_correction >= limit
+ // Since canonicalized_correction and limit_correction are both constants, we can replace them with a new constant:
+ // final_correction = canonicalized_correction + limit_correction
+ // which gives us:
+ //
+ // Final predicate condition:
+ // max_int - final_correction >= limit
+ //
+ // (2) Loop Limit Check Predicate for (ii):
+ // Using (ii): init < limit
+ //
+ // This Loop Limit Check Predicate is not required if we can prove at compile time that either:
+ // (2.1) type(init) < type(limit)
+ // In this case, we know:
+ // all possible values of init < all possible values of limit
+ // and we can skip the predicate.
+ //
+ // (2.2) init < limit is already checked before (i.e. found as a dominating check)
+ // In this case, we do not need to re-check the condition and can skip the predicate.
+ // This is often found for while- and for-loops which have the following shape:
+ //
+ // if (init < limit) { // Dominating test. Do not need the Loop Limit Check Predicate below.
+ // i = init;
+ // if (init >= limit) { trap(); } // Here we would insert the Loop Limit Check Predicate
+ // do {
+ // i += stride;
+ // } while (i < limit);
+ // }
+ //
+ // (2.3) init + stride <= max_int
+ // In this case, there is no overflow of the iv phi after the first loop iteration.
+ // In the proof of the base case above we showed that init + stride <= max_int by using assumption (ii):
+ // init < limit
+ // In the proof of the step case above, we did not need (ii) anymore. Therefore, if we already know at
+ // compile time that init + stride <= max_int then we have trivially proven the base case and that
+ // there is no overflow of the iv phi after the first iteration. In this case, we don't need to check (ii)
+ // again and can skip the predicate.
+
+
+ // Accounting for (LE3) and (LE4) where we use pre-incremented phis in the loop exit check.
+ const jlong limit_correction_for_pre_iv_exit_check = (phi_incr != NULL) ? stride_con : 0;
+
+ // Accounting for (LE2) and (LE4) where we use <= or >= in the loop exit check.
+ const bool includes_limit = (bt == BoolTest::le || bt == BoolTest::ge);
+ const jlong limit_correction_for_le_ge_exit_check = (includes_limit ? (stride_con > 0 ? 1 : -1) : 0);
+
+ const jlong limit_correction = limit_correction_for_pre_iv_exit_check + limit_correction_for_le_ge_exit_check;
+ const jlong canonicalized_correction = stride_con + (stride_con > 0 ? -1 : 1);
+ const jlong final_correction = canonicalized_correction + limit_correction;
+
+ int sov = check_stride_overflow(final_correction, limit_t);
+
+ // If sov==0, limit's type always satisfies the condition, for
+ // example, when it is an array length.
+ if (sov != 0) {
+ if (sov < 0) {
+ return false; // Bailout: integer overflow is certain.
+ }
+ // (1) Loop Limit Check Predicate is required because we could not statically prove that
+ // limit + final_correction = adjusted_limit - 1 + stride <= max_int
+ ProjNode *limit_check_proj = find_predicate_insertion_point(init_control, Deoptimization::Reason_loop_limit_check);
+ if (!limit_check_proj) {
+ // The Loop Limit Check Parse Predicate is not generated if this method trapped here before.
+#ifdef ASSERT
+ if (TraceLoopLimitCheck) {
+ tty->print("missing loop limit check:");
+ loop->dump_head();
+ x->dump(1);
+ }
+#endif
+ return false;
+ }
- // Check if limit is excluded to do more precise int overflow check.
- bool incl_limit = (bt == BoolTest::le || bt == BoolTest::ge);
- int stride_m = stride_con - (incl_limit ? 0 : (stride_con > 0 ? 1 : -1));
-
- // If compare points directly to the phi we need to adjust
- // the compare so that it points to the incr. Limit have
- // to be adjusted to keep trip count the same and the
- // adjusted limit should be checked for int overflow.
- if (phi_incr != NULL) {
- stride_m += stride_con;
- }
+ IfNode* check_iff = limit_check_proj->in(0)->as_If();
- if (limit->is_Con()) {
- int limit_con = limit->get_int();
- if ((stride_con > 0 && limit_con > (max_jint - stride_m)) ||
- (stride_con < 0 && limit_con < (min_jint - stride_m))) {
- // Bailout: it could be integer overflow.
+ if (!is_dominator(get_ctrl(limit), check_iff->in(0))) {
return false;
}
- } else if ((stride_con > 0 && limit_t->_hi <= (max_jint - stride_m)) ||
- (stride_con < 0 && limit_t->_lo >= (min_jint - stride_m))) {
- // Limit's type may satisfy the condition, for example,
- // when it is an array length.
- } else {
- // Generate loop's limit check.
- // Loop limit check predicate should be near the loop.
+
+ Node* cmp_limit;
+ Node* bol;
+
+ if (stride_con > 0) {
+ cmp_limit = new (C) CmpINode(limit, _igvn.intcon(max_jint - final_correction));
+ bol = new (C) BoolNode(cmp_limit, BoolTest::le);
+ } else {
+ cmp_limit = new (C) CmpINode(limit, _igvn.intcon(min_jint - final_correction));
+ bol = new (C) BoolNode(cmp_limit, BoolTest::ge);
+ }
+
+ insert_loop_limit_check(limit_check_proj, cmp_limit, bol);
+ }
+
+ // (2.3)
+ const bool init_plus_stride_could_overflow =
+ (stride_con > 0 && init_t->_hi > max_jint - stride_con) ||
+ (stride_con < 0 && init_t->_lo < min_jint - stride_con);
+ // (2.1)
+ const bool init_gte_limit = (stride_con > 0 && init_t->_hi >= limit_t->_lo) ||
+ (stride_con < 0 && init_t->_lo <= limit_t->_hi);
+
+ if (init_gte_limit && // (2.1)
+ ((bt == BoolTest::ne || init_plus_stride_could_overflow) && // (2.3)
+ !has_dominating_loop_limit_check(init_trip, limit, stride_con, init_control))) { // (2.2)
+ // (2) Iteration Loop Limit Check Predicate is required because neither (2.1), (2.2), nor (2.3) holds.
+ // We use the following condition:
+ // - stride > 0: init < limit
+ // - stride < 0: init > limit
+ //
+ // This predicate is always required if we have a non-equal-operator in the loop exit check (where stride = 1 is
+ // a requirement). We transform the loop exit check by using a less-than-operator. By doing so, we must always
+ // check that init < limit. Otherwise, we could have a different number of iterations at runtime.
+
ProjNode *limit_check_proj = find_predicate_insertion_point(init_control, Deoptimization::Reason_loop_limit_check);
if (!limit_check_proj) {
// The limit check predicate is not generated if this method trapped here before.
@@ -520,41 +768,38 @@ bool PhaseIdealLoop::is_counted_loop( Node *x, IdealLoopTree *loop ) {
#endif
return false;
}
-
IfNode* check_iff = limit_check_proj->in(0)->as_If();
+
+ if (!is_dominator(get_ctrl(limit), check_iff->in(0)) ||
+ !is_dominator(get_ctrl(init_trip), check_iff->in(0))) {
+ return false;
+ }
+
Node* cmp_limit;
Node* bol;
if (stride_con > 0) {
- cmp_limit = new (C) CmpINode(limit, _igvn.intcon(max_jint - stride_m));
- bol = new (C) BoolNode(cmp_limit, BoolTest::le);
+ cmp_limit = new (C) CmpINode(init_trip, limit);
+ bol = new (C) BoolNode(cmp_limit, BoolTest::lt);
} else {
- cmp_limit = new (C) CmpINode(limit, _igvn.intcon(min_jint - stride_m));
- bol = new (C) BoolNode(cmp_limit, BoolTest::ge);
+ cmp_limit = new (C) CmpINode(init_trip, limit);
+ bol = new (C) BoolNode(cmp_limit, BoolTest::gt);
}
- cmp_limit = _igvn.register_new_node_with_optimizer(cmp_limit);
- bol = _igvn.register_new_node_with_optimizer(bol);
- set_subtree_ctrl(bol);
-
- // Replace condition in original predicate but preserve Opaque node
- // so that previous predicates could be found.
- assert(check_iff->in(1)->Opcode() == Op_Conv2B &&
- check_iff->in(1)->in(1)->Opcode() == Op_Opaque1, "");
- Node* opq = check_iff->in(1)->in(1);
- _igvn.hash_delete(opq);
- opq->set_req(1, bol);
- // Update ctrl.
- set_ctrl(opq, check_iff->in(0));
- set_ctrl(check_iff->in(1), check_iff->in(0));
-#ifndef PRODUCT
- // report that the loop predication has been actually performed
- // for this loop
- if (TraceLoopLimitCheck) {
- tty->print_cr("Counted Loop Limit Check generated:");
- debug_only( bol->dump(2); )
+ insert_loop_limit_check(limit_check_proj, cmp_limit, bol);
+ }
+
+ if (bt == BoolTest::ne) {
+ // Now we need to canonicalize the loop condition if it is 'ne'.
+ assert(stride_con == 1 || stride_con == -1, "simple increment only - checked before");
+ if (stride_con > 0) {
+ // 'ne' can be replaced with 'lt' only when init < limit. This is ensured by the inserted predicate above.
+ bt = BoolTest::lt;
+ } else {
+ assert(stride_con < 0, "must be");
+ // 'ne' can be replaced with 'gt' only when init > limit. This is ensured by the inserted predicate above.
+ bt = BoolTest::gt;
}
-#endif
}
if (phi_incr != NULL) {
@@ -567,26 +812,15 @@ bool PhaseIdealLoop::is_counted_loop( Node *x, IdealLoopTree *loop ) {
// is converted to
// i = init; do {} while(++i < limit+1);
//
- limit = gvn->transform(new (C) AddINode(limit, stride));
- }
-
- // Now we need to canonicalize loop condition.
- if (bt == BoolTest::ne) {
- assert(stride_con == 1 || stride_con == -1, "simple increment only");
- // 'ne' can be replaced with 'lt' only when init < limit.
- if (stride_con > 0 && init_t->_hi < limit_t->_lo)
- bt = BoolTest::lt;
- // 'ne' can be replaced with 'gt' only when init > limit.
- if (stride_con < 0 && init_t->_lo > limit_t->_hi)
- bt = BoolTest::gt;
+ adjusted_limit = gvn->transform(new (C) AddINode(limit, stride));
}
- if (incl_limit) {
+ if (includes_limit) {
// The limit check guaranties that 'limit <= (max_jint - stride)' so
// we can convert 'i <= limit' to 'i < limit+1' since stride != 0.
//
Node* one = (stride_con > 0) ? gvn->intcon( 1) : gvn->intcon(-1);
- limit = gvn->transform(new (C) AddINode(limit, one));
+ adjusted_limit = gvn->transform(new (C) AddINode(adjusted_limit, one));
if (bt == BoolTest::le)
bt = BoolTest::lt;
else if (bt == BoolTest::ge)
@@ -594,10 +828,11 @@ bool PhaseIdealLoop::is_counted_loop( Node *x, IdealLoopTree *loop ) {
else
ShouldNotReachHere();
}
- set_subtree_ctrl( limit );
+ set_subtree_ctrl(adjusted_limit);
} else { // LoopLimitCheck
+ Node *hook = new (C) Node(6);
// If compare points to incr, we are ok. Otherwise the compare
// can directly point to the phi; in this case adjust the compare so that
// it points to the incr by adjusting the limit.
@@ -691,6 +926,11 @@ bool PhaseIdealLoop::is_counted_loop( Node *x, IdealLoopTree *loop ) {
limit = gvn->transform(new (C) AddINode(span,init_trip));
set_subtree_ctrl( limit );
+ adjusted_limit = limit;
+
+ // Free up intermediate goo
+ _igvn.remove_dead_node(hook);
+
} // LoopLimitCheck
if (!UseCountedLoopSafepoints) {
@@ -728,7 +968,7 @@ bool PhaseIdealLoop::is_counted_loop( Node *x, IdealLoopTree *loop ) {
}
cmp = cmp->clone();
cmp->set_req(1,incr);
- cmp->set_req(2,limit);
+ cmp->set_req(2, adjusted_limit);
cmp = _igvn.register_new_node_with_optimizer(cmp);
set_ctrl(cmp, iff->in(0));
@@ -802,9 +1042,6 @@ bool PhaseIdealLoop::is_counted_loop( Node *x, IdealLoopTree *loop ) {
}
}
- // Free up intermediate goo
- _igvn.remove_dead_node(hook);
-
#ifdef ASSERT
assert(l->is_valid_counted_loop(), "counted loop shape is messed up");
assert(l == loop->_head && l->phi() == phi && l->loopexit() == lex, "" );
@@ -821,6 +1058,37 @@ bool PhaseIdealLoop::is_counted_loop( Node *x, IdealLoopTree *loop ) {
return true;
}
+// Check if there is a dominating loop limit check of the form 'init < limit' starting at the loop entry.
+// If there is one, then we do not need to create an additional Loop Limit Check Predicate.
+bool PhaseIdealLoop::has_dominating_loop_limit_check(Node* init_trip, Node* limit, const int stride_con,
+ Node* loop_entry) {
+ // Eagerly call transform() on the Cmp and Bool node to common them up if possible. This is required in order to
+ // successfully find a dominated test with the If node below.
+ Node* cmp_limit;
+ Node* bol;
+ if (stride_con > 0) {
+ cmp_limit = _igvn.transform(new (C) CmpINode(init_trip, limit));
+ bol = _igvn.transform(new (C) BoolNode(cmp_limit, BoolTest::lt));
+ } else {
+ cmp_limit = _igvn.transform(new (C) CmpINode(init_trip, limit));
+ bol = _igvn.transform(new (C) BoolNode(cmp_limit, BoolTest::gt));
+ }
+
+ // Check if there is already a dominating init < limit check. If so, we do not need a Loop Limit Check Predicate.
+ IfNode* iff = new (C) IfNode(loop_entry, bol, PROB_MIN, COUNT_UNKNOWN);
+ // Also add fake IfProj nodes in order to call transform() on the newly created IfNode.
+ IfFalseNode* if_false = new (C) IfFalseNode(iff);
+ IfTrueNode* if_true = new (C) IfTrueNode(iff);
+ Node* dominated_iff = _igvn.transform(iff);
+ // ConI node? Found dominating test (IfNode::dominated_by() returns a ConI node).
+ const bool found_dominating_test = dominated_iff != NULL && dominated_iff->Opcode() == Op_ConI;
+
+ // Kill the If with its projections again in the next IGVN round by cutting it off from the graph.
+ _igvn.replace_input_of(iff, 0, C->top());
+ _igvn.replace_input_of(iff, 1, C->top());
+ return found_dominating_test;
+}
+
//----------------------exact_limit-------------------------------------------
Node* PhaseIdealLoop::exact_limit( IdealLoopTree *loop ) {
assert(loop->_head->is_CountedLoop(), "");
diff --git a/hotspot/src/share/vm/opto/loopnode.hpp b/hotspot/src/share/vm/opto/loopnode.hpp
index fa874a4c194..10f9694fe50 100644
--- a/hotspot/src/share/vm/opto/loopnode.hpp
+++ b/hotspot/src/share/vm/opto/loopnode.hpp
@@ -896,6 +896,10 @@ class PhaseIdealLoop : public PhaseTransform {
// Create a new if above the uncommon_trap_if_pattern for the predicate to be promoted
ProjNode* create_new_if_for_predicate(ProjNode* cont_proj, Node* new_entry,
Deoptimization::DeoptReason reason);
+ void insert_loop_limit_check(ProjNode* limit_check_proj, Node* cmp_limit, Node* bol);
+ bool has_dominating_loop_limit_check(Node* init_trip, Node* limit, int stride_con,
+ Node* loop_entry);
+
void register_control(Node* n, IdealLoopTree *loop, Node* pred);
// Clone loop predicates to cloned loops (peeled, unswitched)
diff --git a/hotspot/src/share/vm/opto/phaseX.hpp b/hotspot/src/share/vm/opto/phaseX.hpp
index 332b1175d1e..25f9edd1b70 100644
--- a/hotspot/src/share/vm/opto/phaseX.hpp
+++ b/hotspot/src/share/vm/opto/phaseX.hpp
@@ -433,9 +433,6 @@ class PhaseIterGVN : public PhaseGVN {
protected:
- // Idealize new Node 'n' with respect to its inputs and its value
- virtual Node *transform( Node *a_node );
-
// Warm up hash table, type table and initial worklist
void init_worklist( Node *a_root );
@@ -449,6 +446,9 @@ class PhaseIterGVN : public PhaseGVN {
PhaseIterGVN( PhaseGVN *gvn ); // Used after Parser
PhaseIterGVN( PhaseIterGVN *igvn, const char *dummy ); // Used after +VerifyOpto
+ // Idealize new Node 'n' with respect to its inputs and its value
+ virtual Node *transform( Node *a_node );
+
virtual PhaseIterGVN *is_IterGVN() { return this; }
Unique_Node_List _worklist; // Iterative worklist
diff --git a/hotspot/test/TEST.ROOT b/hotspot/test/TEST.ROOT
index c88dcf29558..55ec2d61184 100644
--- a/hotspot/test/TEST.ROOT
+++ b/hotspot/test/TEST.ROOT
@@ -37,3 +37,7 @@ requires.extraPropDefns = ../../test/jtreg-ext/requires/VMProps.java
requires.properties=sun.arch.data.model \
vm.flavor \
vm.bits
+
+# Path to libraries in the topmost test directory. This is needed so @library
+# does not need ../../ notation to reach them
+external.lib.roots = ../../
diff --git a/hotspot/test/runtime/memory/ReserveMemory.java b/hotspot/test/runtime/memory/ReserveMemory.java
index 9e37d52ccda..abb5a8193a7 100644
--- a/hotspot/test/runtime/memory/ReserveMemory.java
+++ b/hotspot/test/runtime/memory/ReserveMemory.java
@@ -21,10 +21,12 @@
* questions.
*/
+// Aix commits on touch, so this test won't work.
/*
* @test
* @key regression
* @bug 8012015
+ * @requires !(os.family == "aix")
* @summary Make sure reserved (but uncommitted) memory is not accessible
* @library /testlibrary /testlibrary/whitebox
* @build ReserveMemory
@@ -37,14 +39,6 @@
import sun.hotspot.WhiteBox;
public class ReserveMemory {
- private static boolean isWindows() {
- return System.getProperty("os.name").toLowerCase().startsWith("win");
- }
-
- private static boolean isOsx() {
- return System.getProperty("os.name").toLowerCase().startsWith("mac");
- }
-
public static void main(String args[]) throws Exception {
if (args.length > 0) {
WhiteBox.getWhiteBox().readReservedMemory();
@@ -61,9 +55,9 @@ public static void main(String args[]) throws Exception {
"test");
OutputAnalyzer output = new OutputAnalyzer(pb.start());
- if (isWindows()) {
+ if (Platform.isWindows()) {
output.shouldContain("EXCEPTION_ACCESS_VIOLATION");
- } else if (isOsx()) {
+ } else if (Platform.isOSX()) {
output.shouldContain("SIGBUS");
} else {
output.shouldContain("SIGSEGV");
diff --git a/jaxp/README b/jaxp/README
deleted file mode 100644
index 4d65125b34c..00000000000
--- a/jaxp/README
+++ /dev/null
@@ -1,19 +0,0 @@
-README:
-
- This file should be located at the top of the Mercurial repository.
-
- See http://openjdk.java.net/ for more information about the OpenJDK.
-
- See ../README-builds.html for complete details on build machine requirements.
-
-Simple Build Instructions:
- This repository can be loaded as a NetBeans project, built with ant, or
- built with GNU make, e.g.
- ant
- -OR-
- cd make && gnumake
-
- The built files that will be imported into the jdk build will be in the
- "dist" directory.
- Help information is available by running "ant -projecthelp" or "make help".
-
diff --git a/jaxws/README b/jaxws/README
deleted file mode 100644
index 4d65125b34c..00000000000
--- a/jaxws/README
+++ /dev/null
@@ -1,19 +0,0 @@
-README:
-
- This file should be located at the top of the Mercurial repository.
-
- See http://openjdk.java.net/ for more information about the OpenJDK.
-
- See ../README-builds.html for complete details on build machine requirements.
-
-Simple Build Instructions:
- This repository can be loaded as a NetBeans project, built with ant, or
- built with GNU make, e.g.
- ant
- -OR-
- cd make && gnumake
-
- The built files that will be imported into the jdk build will be in the
- "dist" directory.
- Help information is available by running "ant -projecthelp" or "make help".
-
diff --git a/jdk/README b/jdk/README
deleted file mode 100644
index fec16f9520d..00000000000
--- a/jdk/README
+++ /dev/null
@@ -1,29 +0,0 @@
-README:
- This file should be located at the top of the jdk Mercurial repository.
-
- See http://openjdk.java.net/ for more information about the OpenJDK.
-
-Simple Build Instructions:
-
- 1. Download and install a JDK 6 from
- http://java.sun.com/javase/downloads/index.jsp
- Set the environment variable ALT_BOOTDIR to the location of this JDK 6.
-
- 2. Either download and install the latest JDK7 from
- http://download.java.net/openjdk/jdk7/, or build your own complete
- OpenJDK7 by using the top level Makefile in the OpenJDK Mercurial forest.
- Set the environment variable ALT_JDK_IMPORT_PATH to the location of
- this latest JDK7 or OpenJDK7 build.
-
- 3. Check the sanity of doing a build with the current machine:
- cd make && gnumake sanity
- See README-builds.html if you run into problems.
-
- 4. Do a partial build of the jdk:
- cd make && gnumake all
-
- 5. Construct the images:
- cd make && gnumake images
- The resulting JDK image should be found in build/*/j2sdk-image
-
-
diff --git a/jdk/make/CompileLaunchers.gmk b/jdk/make/CompileLaunchers.gmk
index 1f414e2e05d..61973b3bd61 100644
--- a/jdk/make/CompileLaunchers.gmk
+++ b/jdk/make/CompileLaunchers.gmk
@@ -281,10 +281,17 @@ $(eval $(call SetupLauncher,jar, \
$(eval $(call SetupLauncher,jarsigner, \
-DJAVA_ARGS='{ "-J-ms8m"$(COMMA) "sun.security.tools.jarsigner.Main"$(COMMA) }'))
+# On s390 zero, run javac with larger stack
+ifeq ($(OPENJDK_TARGET_CPU), s390x)
+JAVAC_ARGS := '{ "-J-ms8m"$(COMMA) "-J-Xss3m"$(COMMA) "com.sun.tools.javac.Main"$(COMMA) }'
+else
+JAVAC_ARGS := '{ "-J-ms8m"$(COMMA) "com.sun.tools.javac.Main"$(COMMA) }'
+endif
+
$(eval $(call SetupLauncher,javac, \
-DEXPAND_CLASSPATH_WILDCARDS \
-DNEVER_ACT_AS_SERVER_CLASS_MACHINE \
- -DJAVA_ARGS='{ "-J-ms8m"$(COMMA) "com.sun.tools.javac.Main"$(COMMA) }'))
+ -DJAVA_ARGS=$(JAVAC_ARGS)))
ifeq ($(ENABLE_SJAVAC), yes)
$(eval $(call SetupLauncher,sjavac, \
diff --git a/jdk/make/data/cacerts/digicertcseccrootg5 b/jdk/make/data/cacerts/digicertcseccrootg5
new file mode 100644
index 00000000000..f77c81756c4
--- /dev/null
+++ b/jdk/make/data/cacerts/digicertcseccrootg5
@@ -0,0 +1,21 @@
+Owner: CN=DigiCert CS ECC P384 Root G5, O="DigiCert, Inc.", C=US
+Issuer: CN=DigiCert CS ECC P384 Root G5, O="DigiCert, Inc.", C=US
+Serial number: 3698fe712d519f3ced0fdb7b1643011
+Valid from: Fri Jan 15 00:00:00 GMT 2021 until: Sun Jan 14 23:59:59 GMT 2046
+Signature algorithm name: SHA384withECDSA
+Subject Public Key Algorithm: 384-bit EC (secp384r1) key
+Version: 3
+-----BEGIN CERTIFICATE-----
+MIICFjCCAZ2gAwIBAgIQA2mP5xLVGfPO0P23sWQwETAKBggqhkjOPQQDAzBNMQsw
+CQYDVQQGEwJVUzEXMBUGA1UEChMORGlnaUNlcnQsIEluYy4xJTAjBgNVBAMTHERp
+Z2lDZXJ0IENTIEVDQyBQMzg0IFJvb3QgRzUwHhcNMjEwMTE1MDAwMDAwWhcNNDYw
+MTE0MjM1OTU5WjBNMQswCQYDVQQGEwJVUzEXMBUGA1UEChMORGlnaUNlcnQsIElu
+Yy4xJTAjBgNVBAMTHERpZ2lDZXJ0IENTIEVDQyBQMzg0IFJvb3QgRzUwdjAQBgcq
+hkjOPQIBBgUrgQQAIgNiAAR/FK2Ftpf9AiE1TWDoOJOTmz0FEG2v0/7v+rv7c5nz
+7DISjcdouIveiaKIVHeNuyF+M5VWlgno1YyhBLibbhkAYuhCKKZYN4QZVSZ7Mzdn
+8ppyraGurgBCPBx+uHqeIZyjQjBAMB0GA1UdDgQWBBTwjJhxOThlwjobphdmHcjt
+Zd6SNjAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAKBggqhkjOPQQD
+AwNnADBkAjAjb+EAGSZQ5EYgZYs3p8/rBuHMMskqoewyDXOiHgIcNWEqTmmrOXft
+l4jAfWvqid0CMEPx0VijdT6Gm7ZVEYsX9z3+CmnFf07GdRtalMvqERHGCCKI3tB6
+oqV56OMhp80Tsw==
+-----END CERTIFICATE-----
diff --git a/jdk/make/data/cacerts/digicertcsrsarootg5 b/jdk/make/data/cacerts/digicertcsrsarootg5
new file mode 100644
index 00000000000..06b3975c13e
--- /dev/null
+++ b/jdk/make/data/cacerts/digicertcsrsarootg5
@@ -0,0 +1,38 @@
+Owner: CN=DigiCert CS RSA4096 Root G5, O="DigiCert, Inc.", C=US
+Issuer: CN=DigiCert CS RSA4096 Root G5, O="DigiCert, Inc.", C=US
+Serial number: 6cee131be6d55c807f7c0c7fb44e620
+Valid from: Fri Jan 15 00:00:00 GMT 2021 until: Sun Jan 14 23:59:59 GMT 2046
+Signature algorithm name: SHA384withRSA
+Subject Public Key Algorithm: 4096-bit RSA key
+Version: 3
+-----BEGIN CERTIFICATE-----
+MIIFZDCCA0ygAwIBAgIQBs7hMb5tVcgH98DH+0TmIDANBgkqhkiG9w0BAQwFADBM
+MQswCQYDVQQGEwJVUzEXMBUGA1UEChMORGlnaUNlcnQsIEluYy4xJDAiBgNVBAMT
+G0RpZ2lDZXJ0IENTIFJTQTQwOTYgUm9vdCBHNTAeFw0yMTAxMTUwMDAwMDBaFw00
+NjAxMTQyMzU5NTlaMEwxCzAJBgNVBAYTAlVTMRcwFQYDVQQKEw5EaWdpQ2VydCwg
+SW5jLjEkMCIGA1UEAxMbRGlnaUNlcnQgQ1MgUlNBNDA5NiBSb290IEc1MIICIjAN
+BgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAtjNzgNhiA3AULBEcOV58rnyDhh3+
+Ji9MJK2L6oNfqbw9W/wLmEwCRzDs4v7s6DRbZl6/O9cspiX/jFmz3+rafCnZRlBy
+CB1u0RsK3R/NmYn6Dw9zxOGcHXUyzW+X2ipqlbJsyQnQ6gt7fRcGSZnv1t7gyFPU
+rsZ38Ya7Ixy4wN9Z94590e+C5iaLWji1/3XVstlPCfM3iFDaEaSKFBTRUwQAffNq
+RBj+UHAyBxyomg46HcUKH24LJmm3PKJXcCyG+kxulalYQ7msEtb/P+3XQxdrTM6e
+xJCr//oQUJqjkFfW54wQrp8WGs81HX/Xdu2KnDWnKLinXSH8MDfd3ggZTxXG56ba
+kEeO95RTTI5TAr79meXqhtCvAwLTm6qT8asojiAB/0z7zLcpQPWHpBITBR9DbtdR
+UJ84tCDtFwkSj8y5Ga+fzb5pEdOvVRBtF4Z5llLGsgCd5a84sDX0iGuPDgQ9fO6v
+zdNqEErGzYbKIj2hSlz7Dv+I31xip8C5HtmsbH44N/53kyXChYpPtTcGWgaBFPHO
+lJ2ZkeoyWs5nPW4EZq0MTy2jLvee9Xid9wr9fo/jQopVlrzxnzct/J5flf6MGBv8
+jv1LkK/XA2gSY6zik6eiywTlT2TOA/rGFJ/Zi+jM1GKMa+QALBmfGgbGMYFU+1Mk
+mq9Vmbqdda64wt0CAwEAAaNCMEAwHQYDVR0OBBYEFGgBk7HSSkBCaZRGLBxaiKkl
+tEdPMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3DQEB
+DAUAA4ICAQCS/O64AnkXAlF9IcVJZ6ek8agkOOsMaOpaQmuc9HPBaUotszcFUEKY
+kp4GeSwuBpn2798roM2zkgGDtaDLJ7U8IxqYSaLsLZmlWUOs0rGT1lfXHLyT1sZA
+4bNvGVW3E9flQzOktavL2sExZA101iztw41u67uvGUdhYS3A9AW5b3jcOvdCQGVT
+kb2ZDZOSVKapN1krm8uZxrw99wSE8JQzHQ+CWjnLLkXDKBmjspuYyPwxa2CP9umG
+KLzgPH10XRaJW2kkxxCLxEu7Nk/UWT/DsKSRmfgu0UoBnfWIEu+/WhFqWU9Za1pn
+84+0Ew/A2C89KHKqGX8RfWpbn5XnX7eUT/E+oVr/Lcyd3yd3jzJzHGcKdvP6XLG/
+vB29DCibsscXZwszD8O9Ntz7ukILq+2Ew2LWhBapsQdrqW7uxs/msEQpwvCzYYAq
+i2/SFFwlh1Rk86RMwaH4p2vq/uo6/HnbDo/cxvPJ1Gze6YOhjh0i7Mk6sgB73Dun
+Qhp/3IupET2Op8Agb10JXUNE5o9mzKlbB/Hvm3oOs1ThlP0OLMaT11X9cZg1uAlK
+/8YpKCz2Ui3bFBiSJ+IWfozK1GG+goeR65g3P79fXXc/NKwbOEOraHKZMh46Ghml
+ozhMI9ej58zVKpIXkAtaS70WvfuGauKJmezkoFUYyaMIHxPgMghy0A==
+-----END CERTIFICATE-----
diff --git a/jdk/make/data/cacerts/digicerttlseccrootg5 b/jdk/make/data/cacerts/digicerttlseccrootg5
new file mode 100644
index 00000000000..c47b1a61228
--- /dev/null
+++ b/jdk/make/data/cacerts/digicerttlseccrootg5
@@ -0,0 +1,21 @@
+Owner: CN=DigiCert TLS ECC P384 Root G5, O="DigiCert, Inc.", C=US
+Issuer: CN=DigiCert TLS ECC P384 Root G5, O="DigiCert, Inc.", C=US
+Serial number: 9e09365acf7d9c8b93e1c0b042a2ef3
+Valid from: Fri Jan 15 00:00:00 GMT 2021 until: Sun Jan 14 23:59:59 GMT 2046
+Signature algorithm name: SHA384withECDSA
+Subject Public Key Algorithm: 384-bit EC (secp384r1) key
+Version: 3
+-----BEGIN CERTIFICATE-----
+MIICGTCCAZ+gAwIBAgIQCeCTZaz32ci5PhwLBCou8zAKBggqhkjOPQQDAzBOMQsw
+CQYDVQQGEwJVUzEXMBUGA1UEChMORGlnaUNlcnQsIEluYy4xJjAkBgNVBAMTHURp
+Z2lDZXJ0IFRMUyBFQ0MgUDM4NCBSb290IEc1MB4XDTIxMDExNTAwMDAwMFoXDTQ2
+MDExNDIzNTk1OVowTjELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDkRpZ2lDZXJ0LCBJ
+bmMuMSYwJAYDVQQDEx1EaWdpQ2VydCBUTFMgRUNDIFAzODQgUm9vdCBHNTB2MBAG
+ByqGSM49AgEGBSuBBAAiA2IABMFEoc8Rl1Ca3iOCNQfN0MsYndLxf3c1TzvdlHJS
+7cI7+Oz6e2tYIOyZrsn8aLN1udsJ7MgT9U7GCh1mMEy7H0cKPGEQQil8pQgO4CLp
+0zVozptjn4S1mU1YoI71VOeVyaNCMEAwHQYDVR0OBBYEFMFRRVBZqz7nLFr6ICIS
+B4CIfBFqMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MAoGCCqGSM49
+BAMDA2gAMGUCMQCJao1H5+z8blUD2WdsJk6Dxv3J+ysTvLd6jLRl0mlpYxNjOyZQ
+LgGheQaRnUi/wr4CMEfDFXuxoJGZSZOoPHzoRgaLLPIxAJSdYsiJvRmEFOml+wG4
+DXZDjC5Ty3zfDBeWUA==
+-----END CERTIFICATE-----
diff --git a/jdk/make/data/cacerts/digicerttlsrsarootg5 b/jdk/make/data/cacerts/digicerttlsrsarootg5
new file mode 100644
index 00000000000..e8f3adb0d0f
--- /dev/null
+++ b/jdk/make/data/cacerts/digicerttlsrsarootg5
@@ -0,0 +1,38 @@
+Owner: CN=DigiCert TLS RSA4096 Root G5, O="DigiCert, Inc.", C=US
+Issuer: CN=DigiCert TLS RSA4096 Root G5, O="DigiCert, Inc.", C=US
+Serial number: 8f9b478a8fa7eda6a333789de7ccf8a
+Valid from: Fri Jan 15 00:00:00 GMT 2021 until: Sun Jan 14 23:59:59 GMT 2046
+Signature algorithm name: SHA384withRSA
+Subject Public Key Algorithm: 4096-bit RSA key
+Version: 3
+-----BEGIN CERTIFICATE-----
+MIIFZjCCA06gAwIBAgIQCPm0eKj6ftpqMzeJ3nzPijANBgkqhkiG9w0BAQwFADBN
+MQswCQYDVQQGEwJVUzEXMBUGA1UEChMORGlnaUNlcnQsIEluYy4xJTAjBgNVBAMT
+HERpZ2lDZXJ0IFRMUyBSU0E0MDk2IFJvb3QgRzUwHhcNMjEwMTE1MDAwMDAwWhcN
+NDYwMTE0MjM1OTU5WjBNMQswCQYDVQQGEwJVUzEXMBUGA1UEChMORGlnaUNlcnQs
+IEluYy4xJTAjBgNVBAMTHERpZ2lDZXJ0IFRMUyBSU0E0MDk2IFJvb3QgRzUwggIi
+MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCz0PTJeRGd/fxmgefM1eS87IE+
+ajWOLrfn3q/5B03PMJ3qCQuZvWxX2hhKuHisOjmopkisLnLlvevxGs3npAOpPxG0
+2C+JFvuUAT27L/gTBaF4HI4o4EXgg/RZG5Wzrn4DReW+wkL+7vI8toUTmDKdFqgp
+wgscONyfMXdcvyej/Cestyu9dJsXLfKB2l2w4SMXPohKEiPQ6s+d3gMXsUJKoBZM
+pG2T6T867jp8nVid9E6P/DsjyG244gXazOvswzH016cpVIDPRFtMbzCe88zdH5RD
+nU1/cHAN1DrRN/BsnZvAFJNY781BOHW8EwOVfH/jXOnVDdXifBBiqmvwPXbzP6Po
+sMH976pXTayGpxi0KcEsDr9kvimM2AItzVwv8n/vFfQMFawKsPHTDU9qTXeXAaDx
+Zre3zu/O7Oyldcqs4+Fj97ihBMi8ez9dLRYiVu1ISf6nL3kwJZu6ay0/nTvEF+cd
+Lvvyz6b84xQslpghjLSR6Rlgg/IwKwZzUNWYOwbpx4oMYIwo+FKbbuH2TbsGJJvX
+KyY//SovcfXWJL5/MZ4PbeiPT02jP/816t9JXkGPhvnxd3lLG7SjXi/7RgLQZhNe
+XoVPzthwiHvOAbWWl9fNff2C+MIkwcoBOU+NosEUQB+cZtUMCUbW8tDRSHZWOkPL
+tgoRObqME2wGtZ7P6wIDAQABo0IwQDAdBgNVHQ4EFgQUUTMc7TZArxfTJc1paPKv
+TiM+s0EwDgYDVR0PAQH/BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcN
+AQEMBQADggIBAGCmr1tfV9qJ20tQqcQjNSH/0GEwhJG3PxDPJY7Jv0Y02cEhJhxw
+GXIeo8mH/qlDZJY6yFMECrZBu8RHANmfGBg7sg7zNOok992vIGCukihfNudd5N7H
+PNtQOa27PShNlnx2xlv0wdsUpasZYgcYQF+Xkdycx6u1UQ3maVNVzDl92sURVXLF
+O4uJ+DQtpBflF+aZfTCIITfNMBc9uPK8qHWgQ9w+iUuQrm0D4ByjoJYJu32jtyoQ
+REtGBzRj7TG5BO6jm5qu5jF49OokYTurWGT/u4cnYiWB39yhL/btp/96j1EuMPik
+AdKFOV8BmZZvWltwGUb+hmA+rYAQCd05JS9Yf7vSdPD3Rh9GOUrYU9DzLjtxpdRv
+/PNn5AeP3SYZ4Y1b+qOTEZvpyDrDVWiakuFSdjjo4bq9+0/V77PnSIMx8IIh47a+
+p6tv75/fTM8BuGJqIz3nCU2AG3swpMPdB380vqQmsvZB6Akd4yCYqjdP//fx4ilw
+MUc/dNAUFvohigLVigmUdy7yWSiLfFCSCmZ4OIN1xLVaqBHG5cGdZlXPU8Sv13WF
+qUITVuwhd4GTWgzqltlJyqEI8pc7bZsEGCREjnwB8twl2F6GmrE52/WRMmrRpnCK
+ovfepEWFJqgejF0pW8hL2JpqA15w8oVPbEtoL8pU9ozaMv7Da4M/OMZ+
+-----END CERTIFICATE-----
diff --git a/jdk/make/data/cacerts/emsigneccrootcag3 b/jdk/make/data/cacerts/emsigneccrootcag3
new file mode 100644
index 00000000000..06adad468be
--- /dev/null
+++ b/jdk/make/data/cacerts/emsigneccrootcag3
@@ -0,0 +1,22 @@
+Owner: CN=emSign ECC Root CA - G3, O=eMudhra Technologies Limited, OU=emSign PKI, C=IN
+Issuer: CN=emSign ECC Root CA - G3, O=eMudhra Technologies Limited, OU=emSign PKI, C=IN
+Serial number: 3cf607a968700eda8b84
+Valid from: Sun Feb 18 18:30:00 GMT 2018 until: Wed Feb 18 18:30:00 GMT 2043
+Signature algorithm name: SHA384withECDSA
+Subject Public Key Algorithm: 384-bit EC (secp384r1) key
+Version: 3
+-----BEGIN CERTIFICATE-----
+MIICTjCCAdOgAwIBAgIKPPYHqWhwDtqLhDAKBggqhkjOPQQDAzBrMQswCQYDVQQG
+EwJJTjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBUZWNo
+bm9sb2dpZXMgTGltaXRlZDEgMB4GA1UEAxMXZW1TaWduIEVDQyBSb290IENBIC0g
+RzMwHhcNMTgwMjE4MTgzMDAwWhcNNDMwMjE4MTgzMDAwWjBrMQswCQYDVQQGEwJJ
+TjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBUZWNobm9s
+b2dpZXMgTGltaXRlZDEgMB4GA1UEAxMXZW1TaWduIEVDQyBSb290IENBIC0gRzMw
+djAQBgcqhkjOPQIBBgUrgQQAIgNiAAQjpQy4LRL1KPOxst3iAhKAnjlfSU2fySU0
+WXTsuwYc58Byr+iuL+FBVIcUqEqy6HyC5ltqtdyzdc6LBtCGI79G1Y4PPwT01xyS
+fvalY8L1X44uT6EYGQIrMgqCZH0Wk9GjQjBAMB0GA1UdDgQWBBR8XQKEE9TMipuB
+zhccLikenEhjQjAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAKBggq
+hkjOPQQDAwNpADBmAjEAvvNhzwIQHWSVB7gYboiFBS+DCBeQyh+KTOgNG3qxrdWB
+CUfvO6wIBHxcmbHtRwfSAjEAnbpV/KlK6O3t5nYBQnvI+GDZjVGLVTv7jHvrZQnD
++JbNR6iC8hZVdyR+EhCVBCyj
+-----END CERTIFICATE-----
diff --git a/jdk/make/data/cacerts/emsignrootcag1 b/jdk/make/data/cacerts/emsignrootcag1
new file mode 100644
index 00000000000..6e6e73cd622
--- /dev/null
+++ b/jdk/make/data/cacerts/emsignrootcag1
@@ -0,0 +1,29 @@
+Owner: CN=emSign Root CA - G1, O=eMudhra Technologies Limited, OU=emSign PKI, C=IN
+Issuer: CN=emSign Root CA - G1, O=eMudhra Technologies Limited, OU=emSign PKI, C=IN
+Serial number: 31f5e4620c6c58edd6d8
+Valid from: Sun Feb 18 18:30:00 GMT 2018 until: Wed Feb 18 18:30:00 GMT 2043
+Signature algorithm name: SHA256withRSA
+Subject Public Key Algorithm: 2048-bit RSA key
+Version: 3
+-----BEGIN CERTIFICATE-----
+MIIDlDCCAnygAwIBAgIKMfXkYgxsWO3W2DANBgkqhkiG9w0BAQsFADBnMQswCQYD
+VQQGEwJJTjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBU
+ZWNobm9sb2dpZXMgTGltaXRlZDEcMBoGA1UEAxMTZW1TaWduIFJvb3QgQ0EgLSBH
+MTAeFw0xODAyMTgxODMwMDBaFw00MzAyMTgxODMwMDBaMGcxCzAJBgNVBAYTAklO
+MRMwEQYDVQQLEwplbVNpZ24gUEtJMSUwIwYDVQQKExxlTXVkaHJhIFRlY2hub2xv
+Z2llcyBMaW1pdGVkMRwwGgYDVQQDExNlbVNpZ24gUm9vdCBDQSAtIEcxMIIBIjAN
+BgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAk0u76WaK7p1b1TST0Bsew+eeuGQz
+f2N4aLTNLnF115sgxk0pvLZoYIr3IZpWNVrzdr3YzZr/k1ZLpVkGoZM0Kd0WNHVO
+8oG0x5ZOrRkVUkr+PHB1cM2vK6sVmjM8qrOLqs1D/fXqcP/tzxE7lM5OMhbTI0Aq
+d7OvPAEsbO2ZLIvZTmmYsvePQbAyeGHWDV/D+qJAkh1cF+ZwPjXnorfCYuKrpDhM
+tTk1b+oDafo6VGiFbdbyL0NVHpENDtjVaqSW0RM8LHhQ6DqS0hdW5TUaQBw+jSzt
+Od9C4INBdN+jzcKGYEho42kLVACL5HZpIQ15TjQIXhTCzLG3rdd8cIrHhQIDAQAB
+o0IwQDAdBgNVHQ4EFgQU++8Nhp6w492pufEhF38+/PB3KxowDgYDVR0PAQH/BAQD
+AgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQELBQADggEBAFn/8oz1h31x
+PaOfG1vR2vjTnGs2vZupYeveFix0PZ7mddrXuqe8QhfnPZHr5X3dPpzxz5KsbEjM
+wiI/aTvFthUvozXGaCocV685743QNcMYDHsAVhzNixl03r4PEuDQqqE/AjSxcM6d
+GNYIAwlG7mDgfrbESQRRfXBgvKqy/3lyeqYdPV8q+Mri/Tm3R7nrft8EI6/6nAYH
+6ftjk4BAtcZsCjEozgyfz7MjNYBBjWzEN3uBL4ChQEKF6dk4jeihU80Bv2noWgby
+RQuQ+q7hv53yrlc8pa6yVvSLZUDp/TGBLPQ5Cdjua6e0ph0VpZj3AYHYhX3zUVxx
+iN66zB+Afko=
+-----END CERTIFICATE-----
diff --git a/jdk/make/data/cacerts/emsignrootcag2 b/jdk/make/data/cacerts/emsignrootcag2
new file mode 100644
index 00000000000..aaea7ee051a
--- /dev/null
+++ b/jdk/make/data/cacerts/emsignrootcag2
@@ -0,0 +1,39 @@
+Owner: CN=emSign Root CA - G2, O=eMudhra Technologies Limited, OU=emSign PKI, C=IN
+Issuer: CN=emSign Root CA - G2, O=eMudhra Technologies Limited, OU=emSign PKI, C=IN
+Serial number: 864dbf0fe35ed77d8ed8
+Valid from: Sun Feb 18 18:30:00 GMT 2018 until: Wed Feb 18 18:30:00 GMT 2043
+Signature algorithm name: SHA384withRSA
+Subject Public Key Algorithm: 4096-bit RSA key
+Version: 3
+-----BEGIN CERTIFICATE-----
+MIIFlTCCA32gAwIBAgILAIZNvw/jXtd9jtgwDQYJKoZIhvcNAQEMBQAwZzELMAkG
+A1UEBhMCSU4xEzARBgNVBAsTCmVtU2lnbiBQS0kxJTAjBgNVBAoTHGVNdWRocmEg
+VGVjaG5vbG9naWVzIExpbWl0ZWQxHDAaBgNVBAMTE2VtU2lnbiBSb290IENBIC0g
+RzIwHhcNMTgwMjE4MTgzMDAwWhcNNDMwMjE4MTgzMDAwWjBnMQswCQYDVQQGEwJJ
+TjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBUZWNobm9s
+b2dpZXMgTGltaXRlZDEcMBoGA1UEAxMTZW1TaWduIFJvb3QgQ0EgLSBHMjCCAiIw
+DQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMNwGIWW2kHfHK+sXTNwxF07K+IV
+ySTuyFM2r1v002wUfcdT+zs5OM5QbMYFFnedXQI6gCFLsjKrcaej48Zt37OyEb3i
+aPs7CsP4kAyTwzKH9aZe6gXYHrJq40/ZVMNcQVI2PcIp40B/SAN2gUZ+ZaUtIOvV
+jEx26/ebNaXRIsthlkOG/caB+QRwDw1tl7338Zlv0M2oTBUy4B3e7dGP5pgXH71M
+jqHPCoNo+xv9f0NTBT+hUDa8h8wUtcGQq9CDeJTpjWcD2bP2AMdVG6oVpMAUeUzo
+cCyglvtFdUMjggxBbw4qhau1HXPG8Ot9hwL7ZMi8tkTzrvUIxxb8G9LF/7kKeCE7
+tGZaVzDTnXuifl3msR4ErHsQ4P7lVu2AIjIAhrAXoedDidb7pMcf7TABdrYUT1Jo
+G/AiK+J9jO6GTjeADD4LMDSBZhHMuBK/PJ/g0kGBt+/C1L+/HURzQhJkMlRnM6Rv
+XoCtfKopSlns5trZmTi971Wjbn88QXP61lGpBCUPwCjs7rpOYvSUJtI+lcbF+37q
+kIqOXYkVT3cupDSpw+H89kFtj5GKY+Xny4LxY+3IvDIRiyd6ky1DPj713DI0yqve
+EpsIr3A0PdwuyUI7CS1jg0NnGFT6Xxyr0xB+VDt83FJYW8v16k2pbaQ4kVxA3aXd
+X9dZYyVR1S59KM75AgMBAAGjQjBAMB0GA1UdDgQWBBTt7E1FYRgo57MjKBEcTaUn
+DV7s9DAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG9w0B
+AQwFAAOCAgEACFC/ilQg8KTCVBxFJW/sazomkS0kNYbEIZg4B3obqwsJ7SX98z8Z
+gfzBpz0nYClwwJjWbFN1R2zY8pCEot6/dgmA8Vbq0GxhwPM5YN/SZquNyRIxO3cU
+dlAcwf+vSezdVCf9wOzvSAF3q0a5ljvbdbNJNpfScQVp7UUd5sBsZk8jXO1KQ/go
+/Vf/GDPnrIFmxpAIGE3sgnO8lAv9FzUaAeuv7HWe47xN9J7+bQzF93yHuIXACPTL
+pQHhg2zMv5C7BAbuDHfbj1Cu294Z832yhSfBcziWGskOvl3es2EcHytbS9c9P+0z
+Mpka7zGC1FHrvLb/FoduH86TeZt0QjZ6pcplNzoaxDnDvzTJ6CC2Eny+qH/APFCu
+VUv5/wjwF+HPm8Pup2ARj9cEp92+0qcerfHacNq5hMeGZdbA/dzdUR/5z5zXdxAk
+nl8mcfGb0eMNSTXQmmB/i4AecNnr72uYjzlaXUGYN7Nrb6XouG0pnh0/BBtWWp0U
+ShIPpWEAqs7RJBj6+1ZUYXZ4ObrCw962DxhN2p19Hxw9LtuUUcLqqTPrFXYvwO4t
+ouj7KJnAkaTUfXGdEaFVtFig1EA30WzJY2X1vAQ7hVnniCjgaXAGqjsU6sklNM9n
+xDx5rFCCCEtj9Kh8UHjGK2QqgP5kwgttjOApQMaCoezMfK4KD7WpOXU=
+-----END CERTIFICATE-----
diff --git a/jdk/make/data/cacerts/letsencryptisrgx2 b/jdk/make/data/cacerts/letsencryptisrgx2
new file mode 100644
index 00000000000..054104adbe2
--- /dev/null
+++ b/jdk/make/data/cacerts/letsencryptisrgx2
@@ -0,0 +1,21 @@
+Owner: CN=ISRG Root X2, O=Internet Security Research Group, C=US
+Issuer: CN=ISRG Root X2, O=Internet Security Research Group, C=US
+Serial number: 41d29dd172eaeea780c12c6ce92f8752
+Valid from: Fri Sep 04 00:00:00 GMT 2020 until: Mon Sep 17 16:00:00 GMT 2040
+Signature algorithm name: SHA384withECDSA
+Subject Public Key Algorithm: 384-bit EC (secp384r1) key
+Version: 3
+-----BEGIN CERTIFICATE-----
+MIICGzCCAaGgAwIBAgIQQdKd0XLq7qeAwSxs6S+HUjAKBggqhkjOPQQDAzBPMQsw
+CQYDVQQGEwJVUzEpMCcGA1UEChMgSW50ZXJuZXQgU2VjdXJpdHkgUmVzZWFyY2gg
+R3JvdXAxFTATBgNVBAMTDElTUkcgUm9vdCBYMjAeFw0yMDA5MDQwMDAwMDBaFw00
+MDA5MTcxNjAwMDBaME8xCzAJBgNVBAYTAlVTMSkwJwYDVQQKEyBJbnRlcm5ldCBT
+ZWN1cml0eSBSZXNlYXJjaCBHcm91cDEVMBMGA1UEAxMMSVNSRyBSb290IFgyMHYw
+EAYHKoZIzj0CAQYFK4EEACIDYgAEzZvVn4CDCuwJSvMWSj5cz3es3mcFDR0HttwW
++1qLFNvicWDEukWVEYmO6gbf9yoWHKS5xcUy4APgHoIYOIvXRdgKam7mAHf7AlF9
+ItgKbppbd9/w+kHsOdx1ymgHDB/qo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0T
+AQH/BAUwAwEB/zAdBgNVHQ4EFgQUfEKWrt5LSDv6kviejM9ti6lyN5UwCgYIKoZI
+zj0EAwMDaAAwZQIwe3lORlCEwkSHRhtFcP9Ymd70/aTSVaYgLXTWNLxBo1BfASdW
+tL4ndQavEi51mI38AjEAi/V3bNTIZargCyzuFJ0nN6T5U6VR5CmD1/iQMVtCnwr1
+/q4AaOeMSQ+2b1tbFfLn
+-----END CERTIFICATE-----
diff --git a/jdk/make/data/cacerts/teliarootcav2 b/jdk/make/data/cacerts/teliarootcav2
new file mode 100644
index 00000000000..2e913fe85f7
--- /dev/null
+++ b/jdk/make/data/cacerts/teliarootcav2
@@ -0,0 +1,39 @@
+Owner: CN=Telia Root CA v2, O=Telia Finland Oyj, C=FI
+Issuer: CN=Telia Root CA v2, O=Telia Finland Oyj, C=FI
+Serial number: 1675f27d6fe7ae3e4acbe095b059e
+Valid from: Thu Nov 29 11:55:54 GMT 2018 until: Sun Nov 29 11:55:54 GMT 2043
+Signature algorithm name: SHA256withRSA
+Subject Public Key Algorithm: 4096-bit RSA key
+Version: 3
+-----BEGIN CERTIFICATE-----
+MIIFdDCCA1ygAwIBAgIPAWdfJ9b+euPkrL4JWwWeMA0GCSqGSIb3DQEBCwUAMEQx
+CzAJBgNVBAYTAkZJMRowGAYDVQQKDBFUZWxpYSBGaW5sYW5kIE95ajEZMBcGA1UE
+AwwQVGVsaWEgUm9vdCBDQSB2MjAeFw0xODExMjkxMTU1NTRaFw00MzExMjkxMTU1
+NTRaMEQxCzAJBgNVBAYTAkZJMRowGAYDVQQKDBFUZWxpYSBGaW5sYW5kIE95ajEZ
+MBcGA1UEAwwQVGVsaWEgUm9vdCBDQSB2MjCCAiIwDQYJKoZIhvcNAQEBBQADggIP
+ADCCAgoCggIBALLQPwe84nvQa5n44ndp586dpAO8gm2h/oFlH0wnrI4AuhZ76zBq
+AMCzdGh+sq/H1WKzej9Qyow2RCRj0jbpDIX2Q3bVTKFgcmfiKDOlyzG4OiIjNLh9
+vVYiQJ3q9HsDrWj8soFPmNB06o3lfc1jw6P23pLCWBnglrvFxKk9pXSW/q/5iaq9
+lRdU2HhE8Qx3FZLgmEKnpNaqIJLNwaCzlrI6hEKNfdWV5Nbb6WLEWLN5xYzTNTOD
+n3WhUidhOPFZPY5Q4L15POdslv5e2QJltI5c0BE0312/UqeBAMN/mUWZFdUXyApT
+7GPzmX3MaRKGwhfwAZ6/hLzRUssbkmbOpFPlob/E2wnW5olWK8jjfN7j/4nlNW4o
+6GwLI1GpJQXrSPjdscr6bAhR77cYbETKJuFzxokGgeWKrLDiKca5JLNrRBH0pUPC
+TEPlcDaMtjNXepUugqD0XBCzYYP2AgWGLnwtbNwDRm41k9V6lS/eINhbfpSQBGq6
+WT0EBXWdN6IOLj3rwaRSg/7Qa9RmjtzG6RJOHSpXqhC8fF6CfaamyfItufUXJ63R
+DolUK5X6wK0dmBR4M0KGCqlztft0DbcbMBnEWg4cJ7faGND/isgFuvGqHKI3t+ZI
+pEYslOqodmJHixBTB0hXbOKSTbauBcvcwUpej6w9GU7C7WB1K9vBykLVAgMBAAGj
+YzBhMB8GA1UdIwQYMBaAFHKs5DN5qkWH9v2sHZ7Wxy+G2CQ5MB0GA1UdDgQWBBRy
+rOQzeapFh/b9rB2e1scvhtgkOTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUw
+AwEB/zANBgkqhkiG9w0BAQsFAAOCAgEAoDtZpwmUPjaE0n4vOaWWl/oRrfxn83EJ
+8rKJhGdEr7nv7ZbsnGTbMjBvZ5qsfl+yqwE2foH65IRe0qw24GtixX1LDoJt0nZi
+0f6X+J8wfBj5tFJ3gh1229MdqfDBmgC9bXXYfef6xzijnHDoRnkDry5023X4blMM
+A8iZGok1GTzTyVR8qPAs5m4HeW9q4ebqkYJpCh3DflminmtGFZhb069GHWLIzoBS
+SRE/yQQSwxN8PzuKlts8oB4KtItUsiRnDe+Cy748fdHif64W1lZYudogsYMVoe+K
+TTJvQS8TUoKU1xrBeKJR3Stwbbca+few4GeXVtt8YVMJAygCQMez2P2ccGrGKMOF
+6eLtGpOg3kuYooQ+BXcBlj37tCAPnHICehIv1aO6UXivKitEZU61/Qrowc15h2Er
+3oBXRb9n8ZuRXqWk7FlIEA04x7D6w0RtBPV4UBySllva9bguulvP5fBqnUsvWHMt
+Ty3EHD70sz+rFQ47GUGKpMFXEmZxTPpT41frYpUJnlTd0cI8Vzy9OK2YZLe4A5pT
+VmBds9hCG1xLEooc6+t9xnppxyd/pPiL8uSUZodL6ZQHCRJ5irLrdATczvREWeAW
+ysUsWNc8e89ihmpQfTU2Zqf7N+cox9jQraVplI/owd8k+BsHMYeB2F326CjYSlKA
+rBPuUBQemMc=
+-----END CERTIFICATE-----
diff --git a/jdk/src/share/classes/com/sun/crypto/provider/RSACipher.java b/jdk/src/share/classes/com/sun/crypto/provider/RSACipher.java
index 9c57bb0c06d..c90f7efcbdf 100644
--- a/jdk/src/share/classes/com/sun/crypto/provider/RSACipher.java
+++ b/jdk/src/share/classes/com/sun/crypto/provider/RSACipher.java
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2003, 2020, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2003, 2023, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@@ -98,6 +98,7 @@ public final class RSACipher extends CipherSpi {
// cipher parameter for OAEP padding and TLS RSA premaster secret
private AlgorithmParameterSpec spec = null;
+ private boolean forTlsPremasterSecret = false;
// buffer for the data
private byte[] buffer;
@@ -290,6 +291,7 @@ private void init(int opmode, Key key, SecureRandom random,
}
spec = params;
+ forTlsPremasterSecret = true;
this.random = random; // for TLS RSA premaster secret
}
int blockType = (mode <= MODE_DECRYPT) ? RSAPadding.PAD_BLOCKTYPE_2
@@ -353,21 +355,38 @@ private byte[] doFinal() throws BadPaddingException,
switch (mode) {
case MODE_SIGN:
paddingCopy = padding.pad(buffer, 0, bufOfs);
- result = RSACore.rsa(paddingCopy, privateKey, true);
+ if (paddingCopy != null) {
+ result = RSACore.rsa(paddingCopy, privateKey, true);
+ } else {
+ throw new BadPaddingException("Padding error in signing");
+ }
break;
case MODE_VERIFY:
byte[] verifyBuffer = RSACore.convert(buffer, 0, bufOfs);
paddingCopy = RSACore.rsa(verifyBuffer, publicKey);
result = padding.unpad(paddingCopy);
+ if (result == null) {
+ throw new BadPaddingException
+ ("Padding error in verification");
+ }
break;
case MODE_ENCRYPT:
paddingCopy = padding.pad(buffer, 0, bufOfs);
- result = RSACore.rsa(paddingCopy, publicKey);
+ if (paddingCopy != null) {
+ result = RSACore.rsa(paddingCopy, publicKey);
+ } else {
+ throw new BadPaddingException
+ ("Padding error in encryption");
+ }
break;
case MODE_DECRYPT:
byte[] decryptBuffer = RSACore.convert(buffer, 0, bufOfs);
paddingCopy = RSACore.rsa(decryptBuffer, privateKey, false);
result = padding.unpad(paddingCopy);
+ if (result == null && !forTlsPremasterSecret) {
+ throw new BadPaddingException
+ ("Padding error in decryption");
+ }
break;
default:
throw new AssertionError("Internal error");
@@ -376,9 +395,9 @@ private byte[] doFinal() throws BadPaddingException,
} finally {
Arrays.fill(buffer, 0, bufOfs, (byte)0);
bufOfs = 0;
- if (paddingCopy != null // will not happen
+ if (paddingCopy != null
&& paddingCopy != buffer // already cleaned
- && paddingCopy != result) { // DO NOT CLEAN, THIS IS RESULT!
+ && paddingCopy != result) { // DO NOT CLEAN, THIS IS RESULT
Arrays.fill(paddingCopy, (byte)0);
}
}
@@ -452,26 +471,22 @@ protected Key engineUnwrap(byte[] wrappedKey, String algorithm,
boolean isTlsRsaPremasterSecret =
algorithm.equals("TlsRsaPremasterSecret");
- Exception failover = null;
byte[] encoded = null;
update(wrappedKey, 0, wrappedKey.length);
try {
encoded = doFinal();
- } catch (BadPaddingException e) {
- if (isTlsRsaPremasterSecret) {
- failover = e;
- } else {
- throw new InvalidKeyException("Unwrapping failed", e);
- }
- } catch (IllegalBlockSizeException e) {
- // should not occur, handled with length check above
+ } catch (BadPaddingException | IllegalBlockSizeException e) {
+ // BadPaddingException cannot happen for TLS RSA unwrap.
+ // In that case, padding error is indicated by returning null.
+ // IllegalBlockSizeException cannot happen in any case,
+ // because of the length check above.
throw new InvalidKeyException("Unwrapping failed", e);
}
try {
if (isTlsRsaPremasterSecret) {
- if (!(spec instanceof TlsRsaPremasterSecretParameterSpec)) {
+ if (!forTlsPremasterSecret) {
throw new IllegalStateException(
"No TlsRsaPremasterSecretParameterSpec specified");
}
@@ -480,7 +495,7 @@ protected Key engineUnwrap(byte[] wrappedKey, String algorithm,
encoded = KeyUtil.checkTlsPreMasterSecretKey(
((TlsRsaPremasterSecretParameterSpec) spec).getClientVersion(),
((TlsRsaPremasterSecretParameterSpec) spec).getServerVersion(),
- random, encoded, (failover != null));
+ random, encoded, encoded == null);
}
return ConstructKeys.constructKey(encoded, algorithm, type);
diff --git a/jdk/src/share/classes/com/sun/media/sound/JARSoundbankReader.java b/jdk/src/share/classes/com/sun/media/sound/JARSoundbankReader.java
index a196ddcd76e..7bc1664f9c2 100644
--- a/jdk/src/share/classes/com/sun/media/sound/JARSoundbankReader.java
+++ b/jdk/src/share/classes/com/sun/media/sound/JARSoundbankReader.java
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2007, 2022, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2007, 2023, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@@ -31,6 +31,7 @@
import java.io.InputStreamReader;
import java.net.URL;
import java.net.URLClassLoader;
+import java.security.AccessController;
import java.util.ArrayList;
import java.util.Objects;
import javax.sound.midi.InvalidMidiDataException;
@@ -38,6 +39,7 @@
import javax.sound.midi.spi.SoundbankReader;
import sun.reflect.misc.ReflectUtil;
+import sun.security.action.GetBooleanAction;
/**
* JarSoundbankReader is used to read soundbank object from jar files.
@@ -46,12 +48,15 @@
*/
public final class JARSoundbankReader extends SoundbankReader {
- /*
- * Name of the system property that enables the Jar soundbank loading
- * true if jar sound bank is allowed to be loaded
- * default is false
+ /**
+ * Value of the system property that enables the Jar soundbank loading
+ * {@code true} if jar sound bank is allowed to be loaded default is
+ * {@code false}.
*/
- private final static String JAR_SOUNDBANK_ENABLED = "jdk.sound.jarsoundbank";
+ @SuppressWarnings("removal")
+ private static final boolean JAR_SOUNDBANK_ENABLED =
+ AccessController.doPrivileged(
+ new GetBooleanAction("jdk.sound.jarsoundbank"));
private static boolean isZIP(URL url) {
boolean ok = false;
@@ -77,7 +82,7 @@ private static boolean isZIP(URL url) {
public Soundbank getSoundbank(URL url)
throws InvalidMidiDataException, IOException {
Objects.requireNonNull(url);
- if (!Boolean.getBoolean(JAR_SOUNDBANK_ENABLED) || !isZIP(url))
+ if (!JAR_SOUNDBANK_ENABLED || !isZIP(url))
return null;
ArrayList soundbanks = new ArrayList();
diff --git a/jdk/src/share/classes/java/util/jar/JarFile.java b/jdk/src/share/classes/java/util/jar/JarFile.java
index a26dcc4a1c7..ac2e1c9d6a8 100644
--- a/jdk/src/share/classes/java/util/jar/JarFile.java
+++ b/jdk/src/share/classes/java/util/jar/JarFile.java
@@ -436,7 +436,9 @@ private byte[] getBytes(ZipEntry ze) throws IOException {
throw new IOException("Unsupported size: " + uncompressedSize +
" for JarEntry " + ze.getName() +
". Allowed max size: " +
- SignatureFileVerifier.MAX_SIG_FILE_SIZE + " bytes");
+ SignatureFileVerifier.MAX_SIG_FILE_SIZE + " bytes. " +
+ "You can use the jdk.jar.maxSignatureFileSize " +
+ "system property to increase the default value.");
}
int len = (int)uncompressedSize;
byte[] b = IOUtils.readAllBytes(is);
diff --git a/jdk/src/share/classes/javax/security/auth/kerberos/package-info.java b/jdk/src/share/classes/javax/security/auth/kerberos/package-info.java
index 293745479d8..0853663a1f2 100644
--- a/jdk/src/share/classes/javax/security/auth/kerberos/package-info.java
+++ b/jdk/src/share/classes/javax/security/auth/kerberos/package-info.java
@@ -48,6 +48,12 @@
* {@code /lib/security} and failing that, in an OS-specific
* location.
*
+ * The {@code krb5.conf} file is formatted in the Windows INI file style,
+ * which contains a series of relations grouped into different sections.
+ * Each relation contains a key and a value, the value can be an arbitrary
+ * string or a boolean value. A boolean value can be one of "true", "false",
+ * "yes", or "no", case-insensitive.
+ *
* @since JDK1.4
*/
package javax.security.auth.kerberos;
diff --git a/jdk/src/share/classes/org/jcp/xml/dsig/internal/dom/DOMSignatureMethod.java b/jdk/src/share/classes/org/jcp/xml/dsig/internal/dom/DOMSignatureMethod.java
index b3da7acaad2..4bb7b278f2c 100644
--- a/jdk/src/share/classes/org/jcp/xml/dsig/internal/dom/DOMSignatureMethod.java
+++ b/jdk/src/share/classes/org/jcp/xml/dsig/internal/dom/DOMSignatureMethod.java
@@ -312,7 +312,6 @@ byte[] sign(Key key, SignedInfo si, XMLSignContext context)
}
signature.initSign((PrivateKey)key);
LOG.debug("Signature provider: {}", signature.getProvider());
- LOG.debug("Signing with key: {}", key);
LOG.debug("JCA Algorithm: {}", getJCAAlgorithm());
try (SignerOutputStream outputStream = new SignerOutputStream(signature)) {
diff --git a/jdk/src/share/classes/sun/security/krb5/Config.java b/jdk/src/share/classes/sun/security/krb5/Config.java
index 117acb840c8..3b4a52b5f3c 100644
--- a/jdk/src/share/classes/sun/security/krb5/Config.java
+++ b/jdk/src/share/classes/sun/security/krb5/Config.java
@@ -449,23 +449,6 @@ public int getIntValue(String... keys) {
return value;
}
- /**
- * Gets the boolean value for the specified keys.
- * @param keys the keys
- * @return the boolean value, false is returned if it cannot be
- * found or the value is not "true" (case insensitive).
- * @throw IllegalArgumentException if any of the keys is illegal
- * @see #get(java.lang.String[])
- */
- public boolean getBooleanValue(String... keys) {
- String val = get(keys);
- if (val != null && val.equalsIgnoreCase("true")) {
- return true;
- } else {
- return false;
- }
- }
-
/**
* Parses a string to an integer. The convertible strings include the
* string representations of positive integers, negative integers, and
@@ -474,7 +457,7 @@ public boolean getBooleanValue(String... keys) {
*
* @param input the String to be converted to an Integer.
* @return an numeric value represented by the string
- * @exception NumberFormationException if the String does not contain a
+ * @exception NumberFormatException if the String does not contain a
* parsable integer.
*/
private int parseIntValue(String input) throws NumberFormatException {
@@ -1060,20 +1043,13 @@ public void resetDefaultRealm(String realm) {
* use addresses if "no_addresses" or "noaddresses" is set to false
*/
public boolean useAddresses() {
- boolean useAddr = false;
- // use addresses if "no_addresses" is set to false
- String value = get("libdefaults", "no_addresses");
- useAddr = (value != null && value.equalsIgnoreCase("false"));
- if (useAddr == false) {
- // use addresses if "noaddresses" is set to false
- value = get("libdefaults", "noaddresses");
- useAddr = (value != null && value.equalsIgnoreCase("false"));
- }
- return useAddr;
+ return getBooleanObject("libdefaults", "no_addresses") == Boolean.FALSE ||
+ getBooleanObject("libdefaults", "noaddresses") == Boolean.FALSE;
}
/**
- * Check if need to use DNS to locate Kerberos services
+ * Check if need to use DNS to locate Kerberos services for name. If not
+ * defined, check dns_fallback, whose default value is true.
*/
private boolean useDNS(String name, boolean defaultValue) {
Boolean value = getBooleanObject("libdefaults", name);
diff --git a/jdk/src/share/classes/sun/security/krb5/internal/KDCOptions.java b/jdk/src/share/classes/sun/security/krb5/internal/KDCOptions.java
index d9fdf43d616..76a71cb603c 100644
--- a/jdk/src/share/classes/sun/security/krb5/internal/KDCOptions.java
+++ b/jdk/src/share/classes/sun/security/krb5/internal/KDCOptions.java
@@ -301,14 +301,14 @@ private void setDefault() {
if ((options & KDC_OPT_RENEWABLE_OK) == KDC_OPT_RENEWABLE_OK) {
set(RENEWABLE_OK, true);
} else {
- if (config.getBooleanValue("libdefaults", "renewable")) {
+ if (config.getBooleanObject("libdefaults", "renewable") == Boolean.TRUE) {
set(RENEWABLE_OK, true);
}
}
if ((options & KDC_OPT_PROXIABLE) == KDC_OPT_PROXIABLE) {
set(PROXIABLE, true);
} else {
- if (config.getBooleanValue("libdefaults", "proxiable")) {
+ if (config.getBooleanObject("libdefaults", "proxiable") == Boolean.TRUE) {
set(PROXIABLE, true);
}
}
@@ -316,7 +316,7 @@ private void setDefault() {
if ((options & KDC_OPT_FORWARDABLE) == KDC_OPT_FORWARDABLE) {
set(FORWARDABLE, true);
} else {
- if (config.getBooleanValue("libdefaults", "forwardable")) {
+ if (config.getBooleanObject("libdefaults", "forwardable") == Boolean.TRUE) {
set(FORWARDABLE, true);
}
}
diff --git a/jdk/src/share/classes/sun/security/krb5/internal/crypto/EType.java b/jdk/src/share/classes/sun/security/krb5/internal/crypto/EType.java
index abccce82415..2f314c9c09e 100644
--- a/jdk/src/share/classes/sun/security/krb5/internal/crypto/EType.java
+++ b/jdk/src/share/classes/sun/security/krb5/internal/crypto/EType.java
@@ -58,8 +58,8 @@ public static void initStatic() {
boolean allowed = false;
try {
Config cfg = Config.getInstance();
- String temp = cfg.get("libdefaults", "allow_weak_crypto");
- if (temp != null && temp.equals("true")) allowed = true;
+ allowed = cfg.getBooleanObject("libdefaults", "allow_weak_crypto")
+ == Boolean.TRUE;
} catch (Exception exc) {
if (DEBUG) {
System.out.println ("Exception in getting allow_weak_crypto, " +
diff --git a/jdk/src/share/classes/sun/security/pkcs11/P11Signature.java b/jdk/src/share/classes/sun/security/pkcs11/P11Signature.java
index 2841ae82eb2..7260c0bebfc 100644
--- a/jdk/src/share/classes/sun/security/pkcs11/P11Signature.java
+++ b/jdk/src/share/classes/sun/security/pkcs11/P11Signature.java
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2003, 2021, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2003, 2023, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@@ -728,9 +728,12 @@ private byte[] pkcs1Pad(byte[] data) {
int len = (p11Key.length() + 7) >> 3;
RSAPadding padding = RSAPadding.getInstance
(RSAPadding.PAD_BLOCKTYPE_1, len);
- byte[] padded = padding.pad(data);
- return padded;
- } catch (GeneralSecurityException e) {
+ byte[] result = padding.pad(data);
+ if (result == null) {
+ throw new ProviderException("Error padding data");
+ }
+ return result;
+ } catch (InvalidKeyException | InvalidAlgorithmParameterException e) {
throw new ProviderException(e);
}
}
diff --git a/jdk/src/share/classes/sun/security/provider/certpath/ForwardBuilder.java b/jdk/src/share/classes/sun/security/provider/certpath/ForwardBuilder.java
index 00351647349..83f61823ed0 100644
--- a/jdk/src/share/classes/sun/security/provider/certpath/ForwardBuilder.java
+++ b/jdk/src/share/classes/sun/security/provider/certpath/ForwardBuilder.java
@@ -336,8 +336,11 @@ private void getMatchingCACerts(ForwardState currentState,
}
}
+ // Thread-local gate to prevent recursive provider lookups
+ private static ThreadLocal