Compare commits


10 Commits

Author SHA1 Message Date
openeuler-ci-bot
6f00704fa0
!115 [sync] [EulerMaker] Fix hadoop build failure in openEuler-24.03-LTS-SP1:everything
From: @xuduo01 
Reviewed-by: @wenwj0 
Signed-off-by: @wenwj0
2024-12-14 00:16:09 +00:00
xuduo
d83fdbdf18 add hadoop build config 2024-12-13 21:49:25 +08:00
openeuler-ci-bot
eec1c40d99
!107 Prefer the build environment's unified npm configuration instead of modifying it separately in this project
From: @xiaochn 
Reviewed-by: @wenwj0 
Signed-off-by: @wenwj0
2024-11-13 01:23:33 +00:00
xiaochuannan
0af5dbcc5f Prefer the build environment's unified npm configuration instead of modifying it separately in this project 2024-11-11 16:20:07 +08:00
openeuler-ci-bot
daa0e99658
!97 fix CVE-2024-23454
From: @fandeyuan 
Reviewed-by: @wenwj0 
Signed-off-by: @wenwj0
2024-10-08 06:56:39 +00:00
fandeyuan
38d63cf76d fix CVE-2024-23454 2024-09-26 17:00:03 +08:00
openeuler-ci-bot
bf336bb857
!91 [sync] PR-90: Fix build on riscv64
From: @openeuler-sync-bot 
Reviewed-by: @wenwj0 
Signed-off-by: @wenwj0
2024-06-06 13:43:30 +00:00
Dingli Zhang
1db6ed0535 Fix build on riscv64
(cherry picked from commit 536b3177bd9eb34eeb07f9b923fa707f5797b3bb)
2024-05-28 22:04:18 +08:00
openeuler-ci-bot
7944f9015a
!84 add conflicts in spec
From: @xiexing01 
Reviewed-by: @macchen1 
Signed-off-by: @macchen1
2023-12-18 09:05:47 +00:00
xiexing01
eddeb27d28 add conflicts to hadoop spec 2023-12-15 06:24:17 +00:00
7 changed files with 164 additions and 46 deletions

02-Upgrade-os-maven-plugin-to-1.7.1.patch View File

@@ -0,0 +1,14 @@
diff --git a/hadoop-project/pom.xml b/hadoop-project/pom.xml
index 0b2f6f17..c402535f 100644
--- a/hadoop-project/pom.xml
+++ b/hadoop-project/pom.xml
@@ -151,7 +151,7 @@
<protobuf-compile.version>3.5.1</protobuf-compile.version>
<grpc.version>1.10.0</grpc.version>
- <os-maven-plugin.version>1.7.0</os-maven-plugin.version>
+ <os-maven-plugin.version>1.7.1</os-maven-plugin.version>
<!-- define the Java language version used by the compiler -->
<javac.version>1.8</javac.version>

03-Fix-build-on-riscv.patch View File

@@ -0,0 +1,23 @@
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/lib/primitives.h b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/lib/primitives.h
index 000c4b91..70e94487 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/lib/primitives.h
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/lib/primitives.h
@@ -99,7 +99,7 @@ inline void simple_memcpy(void * dest, const void * src, size_t len) {
inline uint32_t bswap(uint32_t val) {
#ifdef __aarch64__
__asm__("rev %w[dst], %w[src]" : [dst]"=r"(val) : [src]"r"(val));
-#elif defined(__ppc64__)||(__PPC64__)||(__powerpc64__)
+#elif defined(__ppc64__)||(__PPC64__)||(__powerpc64__)||(__riscv)
return __builtin_bswap32(val);
#else
__asm__("bswap %0" : "=r" (val) : "0" (val));
@@ -110,7 +110,7 @@ inline uint32_t bswap(uint32_t val) {
inline uint64_t bswap64(uint64_t val) {
#ifdef __aarch64__
__asm__("rev %[dst], %[src]" : [dst]"=r"(val) : [src]"r"(val));
-#elif defined(__ppc64__)||(__PPC64__)||(__powerpc64__)
+#elif defined(__ppc64__)||(__PPC64__)||(__powerpc64__)||(__riscv)
return __builtin_bswap64(val);
#else
#ifdef __X64
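
Why this change works: the #else branch of bswap()/bswap64() falls through to x86 `bswap` inline assembly, which does not assemble on riscv64, and the RISC-V base ISA has no single byte-reverse instruction (the Zbb extension later added `rev8`). Routing __riscv through the compiler builtin lets GCC/Clang emit the best sequence for the target. A minimal standalone sketch of the same pattern, not taken from the patch:

#include <stdint.h>
#include <stdio.h>

/* Portable 32-bit byte swap: the builtin lowers to rev on aarch64,
   bswap on x86, and a shift/or sequence (or rev8 with Zbb) on riscv. */
static inline uint32_t bswap32_portable(uint32_t v) {
#if defined(__GNUC__) || defined(__clang__)
    return __builtin_bswap32(v);
#else
    return (v >> 24) | ((v >> 8) & 0x0000FF00u) |
           ((v << 8) & 0x00FF0000u) | (v << 24);
#endif
}

int main(void) {
    printf("%08x\n", bswap32_portable(0x11223344u)); /* prints 44332211 */
    return 0;
}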

04-Enhance-access-control-for-RunJar.patch View File

@@ -0,0 +1,57 @@
From 7c30e7ffb65f9a58a85b3b556f8c0de04c1b4b20 Mon Sep 17 00:00:00 2001
From: He Xiaoqiao <hexiaoqiao@apache.org>
Date: Mon, 15 Jan 2024 16:01:08 +0800
Subject: [PATCH] HADOOP-19031. Enhance access control for RunJar.
---
.../main/java/org/apache/hadoop/util/RunJar.java | 16 +++++++++-------
1 file changed, 9 insertions(+), 7 deletions(-)
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/RunJar.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/RunJar.java
index c28e69f5..e527f602 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/RunJar.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/RunJar.java
@@ -28,10 +28,14 @@ import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLClassLoader;
import java.nio.file.Files;
+import java.nio.file.attribute.FileAttribute;
+import java.nio.file.attribute.PosixFilePermission;
+import java.nio.file.attribute.PosixFilePermissions;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Enumeration;
import java.util.List;
+import java.util.Set;
import java.util.jar.JarEntry;
import java.util.jar.JarFile;
import java.util.jar.JarInputStream;
@@ -287,20 +291,18 @@ public class RunJar {
final File workDir;
try {
- workDir = File.createTempFile("hadoop-unjar", "", tmpDir);
- } catch (IOException ioe) {
+ FileAttribute<Set<PosixFilePermission>> perms = PosixFilePermissions
+ .asFileAttribute(PosixFilePermissions.fromString("rwx------"));
+ workDir = Files.createTempDirectory(tmpDir.toPath(), "hadoop-unjar", perms).toFile();
+ } catch (IOException | SecurityException e) {
// If user has insufficient perms to write to tmpDir, default
// "Permission denied" message doesn't specify a filename.
System.err.println("Error creating temp dir in java.io.tmpdir "
- + tmpDir + " due to " + ioe.getMessage());
+ + tmpDir + " due to " + e.getMessage());
System.exit(-1);
return;
}
- if (!workDir.delete()) {
- System.err.println("Delete failed for " + workDir);
- System.exit(-1);
- }
ensureDirectory(workDir);
ShutdownHookManager.get().addShutdownHook(
--
2.43.0
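
The point of this patch: the old code created a temporary file, deleted it, and re-created the path as a directory, leaving a window in which another local user could claim or populate the path; the new code creates the directory directly with owner-only (rwx------) permissions, so no such window exists. The same atomic create-with-0700 idiom in C is mkdtemp(3); a minimal sketch for comparison, not part of the patch:

#include <stdio.h>
#include <stdlib.h>

int main(void) {
    /* mkdtemp() replaces the trailing XXXXXX and creates the directory
       with mode 0700 in a single step, so there is no gap between
       "reserve the name" and "create the directory" (the race the Java
       patch closes with Files.createTempDirectory + rwx------ perms). */
    char tmpl[] = "/tmp/hadoop-unjar-XXXXXX";
    char *workdir = mkdtemp(tmpl);
    if (workdir == NULL) {
        perror("mkdtemp");
        return 1;
    }
    printf("private work dir: %s\n", workdir);
    /* ... unpack the jar here; remove the tree on exit ... */
    return 0;
}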

hadoop.spec View File

@@ -9,9 +9,11 @@
%global __requires_exclude_from ^%{_libdir}/%{name}/libhadoop.so$
%global __provides_exclude_from ^%{_libdir}/%{name}/.*$
%define _binaries_in_noarch_packages_terminate_build 0
%define huawei_repo https://repo.huaweicloud.com/repository
Name: hadoop
Version: 3.3.6
Release: 1
Release: 6
Summary: A software platform for processing vast amounts of data
# The BSD license file is missing
# https://issues.apache.org/jira/browse/HADOOP-9849
@@ -34,12 +36,16 @@ Source13: %{name}-yarn-site.xml
Source14: yarn-v1.22.5.tar.gz
Source15: node-12.22.1-linux-x64.tar.gz
Source16: node-v12.22.1-linux-arm64.tar.gz
Source17: settings.xml
Source17: protoc-3.7.1-linux-riscv64.exe
Source18: protoc-gen-grpc-java-1.26.0-linux-riscv_64.exe
Patch0: 01-lock-triple-beam-version-to-1.3.0.patch
Patch0: 01-lock-triple-beam-version-to-1.3.0.patch
Patch1: 02-Upgrade-os-maven-plugin-to-1.7.1.patch
Patch2: 03-Fix-build-on-riscv.patch
Patch3: 04-Enhance-access-control-for-RunJar.patch
BuildRoot: %{_tmppath}/%{name}-%{version}-%{release}-root
BuildRequires: java-1.8.0-openjdk-devel maven hostname maven-local tomcat cmake snappy openssl-devel
BuildRequires: java-1.8.0-openjdk-devel maven hostname maven-local tomcat cmake snappy openssl-devel
BuildRequires: cyrus-sasl-devel chrpath systemd protobuf2-compiler protobuf2-devel protobuf2-java protobuf2
BuildRequires: leveldbjni leveldb-java hawtjni-runtime gcc-c++
BuildRequires: npm chrpath
@@ -76,6 +82,7 @@ Obsoletes: %{name}-javadoc < 2.4.1-22%{?dist}
Requires: apache-zookeeper
Requires: leveldb
Requires: protobuf2-java
Conflicts: hadoop-3.1-client
%description common
Apache Hadoop is a framework that allows for the distributed processing of
@@ -89,6 +96,7 @@ Hadoop modules.
%package common-native
Summary: The native Apache Hadoop library file
Requires: %{name}-common = %{version}-%{release}
Conflicts: hadoop-3.1-common
%description common-native
Apache Hadoop is a framework that allows for the distributed processing of
@@ -101,6 +109,7 @@ This package contains the native-hadoop library
%package devel
Summary: Headers for Apache Hadoop
Requires: libhdfs%{?_isa} = %{version}-%{release}
Conflicts: hadoop-3.1-common-native
%description devel
Header files for Apache Hadoop's hdfs library and other utilities
@@ -113,6 +122,7 @@ Requires: %{name}-common = %{version}-%{release}
Requires(post): systemd
Requires(preun): systemd
Requires(postun): systemd
Conflicts: hadoop-3.1-hdfs
%description hdfs
Apache Hadoop is a framework that allows for the distributed processing of
@@ -136,6 +146,7 @@ Requires: tcnative
Requires(post): systemd
Requires(preun): systemd
Requires(postun): systemd
Conflicts: hadoop-3.1-httpfs
%description httpfs
Apache Hadoop is a framework that allows for the distributed processing of
@@ -150,6 +161,7 @@ the complete FileSystem/FileContext interface in HDFS.
Summary: The Apache Hadoop Filesystem Library
Requires: %{name}-hdfs = %{version}-%{release}
Requires: lzo
Conflicts: hadoop-3.1-libhdfs
%description -n libhdfs
Apache Hadoop is a framework that allows for the distributed processing of
@@ -167,6 +179,7 @@ Requires: %{name}-mapreduce-examples = %{version}-%{release}
Requires(post): systemd
Requires(preun): systemd
Requires(postun): systemd
Conflicts: hadoop-3.1-mapreduce
%description mapreduce
Apache Hadoop is a framework that allows for the distributed processing of
@@ -180,6 +193,7 @@ This package provides Apache Hadoop MapReduce (MRv2).
Summary: Apache Hadoop MapReduce (MRv2) examples
BuildArch: noarch
Requires: hsqldb
Conflicts: hadoop-3.1-mapreduce-examples
%description mapreduce-examples
This package contains mapreduce examples.
@@ -188,6 +202,7 @@ This package contains mapreduce examples.
Summary: Apache Hadoop maven plugin
BuildArch: noarch
Requires: maven
Conflicts: hadoop-3.1-maven-plugin
%description maven-plugin
The Apache Hadoop maven plugin
@@ -199,6 +214,7 @@ Requires: %{name}-common = %{version}-%{release}
Requires: %{name}-hdfs = %{version}-%{release}
Requires: %{name}-mapreduce = %{version}-%{release}
Requires: %{name}-yarn = %{version}-%{release}
Conflicts: hadoop-3.1-tests
%description tests
Apache Hadoop is a framework that allows for the distributed processing of
@@ -220,6 +236,7 @@ Requires: leveldbjni
Requires(post): systemd
Requires(preun): systemd
Requires(postun): systemd
Conflicts: hadoop-3.1-yarn nodejs-yarn
%description yarn
Apache Hadoop is a framework that allows for the distributed processing of
@@ -232,6 +249,7 @@ This package contains Apache Hadoop YARN.
%package yarn-security
Summary: The ability to run Apache Hadoop YARN in secure mode
Requires: %{name}-yarn = %{version}-%{release}
Conflicts: hadoop-3.1-yarn-security
%description yarn-security
Apache Hadoop is a framework that allows for the distributed processing of
@@ -243,27 +261,30 @@ This package contains files needed to run Apache Hadoop YARN in secure mode.
%prep
%autosetup -p1 -n %{name}-%{version}-src
cp %{SOURCE17} ./
sed -i "s,@HOME@,${HOME},g" settings.xml
mvn install:install-file -DgroupId=org.fusesource.leveldbjni -DartifactId=leveldbjni-all -Dversion=1.8 -Dpackaging=jar -Dfile=/usr/lib/java/leveldbjni-all.jar -s settings.xml
mvn install:install-file -DgroupId=org.fusesource.leveldbjni -DartifactId=leveldbjni -Dversion=1.8 -Dpackaging=jar -Dfile=/usr/lib/java/leveldbjni/leveldbjni.jar -s settings.xml
mvn install:install-file -DgroupId=org.iq80.leveldb -DartifactId=leveldb-api -Dversion=0.7 -Dpackaging=jar -Dfile=/usr/share/java/leveldb-java/leveldb-api.jar -s settings.xml
mvn install:install-file -DgroupId=org.iq80.leveldb -DartifactId=leveldb-benchmark -Dversion=0.7 -Dpackaging=jar -Dfile=/usr/share/java/leveldb-java/leveldb-benchmark.jar -s settings.xml
mvn install:install-file -DgroupId=org.iq80.leveldb -DartifactId=leveldb -Dversion=0.7 -Dpackaging=jar -Dfile=/usr/share/java/leveldb-java/leveldb.jar -s settings.xml
mvn install:install-file -DgroupId=orn.fusesource.hawtjni -DartifactId=hawtjni-runtime -Dversion=1.16 -Dpackaging=jar -Dfile=/usr/lib/java/hawtjni/hawtjni-runtime.jar -s settings.xml
%ifarch riscv64
mvn install:install-file -DgroupId=com.google.protobuf -DartifactId=protoc -Dversion=2.5.0 -Dclassifier=linux-riscv64 -Dpackaging=exe -Dfile=/usr/bin/protoc
mvn install:install-file -DgroupId=com.google.protobuf -DartifactId=protoc -Dversion=3.7.1 -Dclassifier=linux-riscv64 -Dpackaging=exe -Dfile=%{SOURCE17}
mvn install:install-file -DgroupId=io.grpc -DartifactId=protoc-gen-grpc-java -Dversion=1.26.0 -Dclassifier=linux-riscv64 -Dpackaging=exe -Dfile=%{SOURCE18}
%endif
mvn install:install-file -DgroupId=org.fusesource.leveldbjni -DartifactId=leveldbjni-all -Dversion=1.8 -Dpackaging=jar -Dfile=/usr/lib/java/leveldbjni-all.jar
mvn install:install-file -DgroupId=org.fusesource.leveldbjni -DartifactId=leveldbjni -Dversion=1.8 -Dpackaging=jar -Dfile=/usr/lib/java/leveldbjni/leveldbjni.jar
mvn install:install-file -DgroupId=org.iq80.leveldb -DartifactId=leveldb-api -Dversion=0.7 -Dpackaging=jar -Dfile=/usr/share/java/leveldb-java/leveldb-api.jar
mvn install:install-file -DgroupId=org.iq80.leveldb -DartifactId=leveldb-benchmark -Dversion=0.7 -Dpackaging=jar -Dfile=/usr/share/java/leveldb-java/leveldb-benchmark.jar
mvn install:install-file -DgroupId=org.iq80.leveldb -DartifactId=leveldb -Dversion=0.7 -Dpackaging=jar -Dfile=/usr/share/java/leveldb-java/leveldb.jar
mvn install:install-file -DgroupId=orn.fusesource.hawtjni -DartifactId=hawtjni-runtime -Dversion=1.16 -Dpackaging=jar -Dfile=/usr/lib/java/hawtjni/hawtjni-runtime.jar
mkdir -p ${HOME}/repository/com/github/eirslett/node/12.22.1/
cp %{SOURCE15} ${HOME}/repository/com/github/eirslett/node/12.22.1/
cp %{SOURCE16} ${HOME}/repository/com/github/eirslett/node/12.22.1/
mv ${HOME}/repository/com/github/eirslett/node/12.22.1/node-v12.22.1-linux-arm64.tar.gz ${HOME}/repository/com/github/eirslett/node/12.22.1/node-12.22.1-linux-arm64.tar.gz
mkdir -p ${HOME}/repository/com/github/eirslett/yarn/1.22.5/
cp %{SOURCE14} ${HOME}/repository/com/github/eirslett/yarn/1.22.5/
mv ${HOME}/repository/com/github/eirslett/yarn/1.22.5/yarn-v1.22.5.tar.gz ${HOME}/repository/com/github/eirslett/yarn/1.22.5/yarn-1.22.5.tar.gz
tar -xzvf ${HOME}/repository/com/github/eirslett/yarn/1.22.5/yarn-1.22.5.tar.gz -C ${HOME}/repository/com/github/eirslett/yarn/1.22.5/
npm config set registry https://repo.huaweicloud.com/repository/npm/
mkdir -p ${HOME}/.m2/repository/com/github/eirslett/node/12.22.1/
cp %{SOURCE15} ${HOME}/.m2/repository/com/github/eirslett/node/12.22.1/
cp %{SOURCE16} ${HOME}/.m2/repository/com/github/eirslett/node/12.22.1/
mv ${HOME}/.m2/repository/com/github/eirslett/node/12.22.1/node-v12.22.1-linux-arm64.tar.gz ${HOME}/.m2/repository/com/github/eirslett/node/12.22.1/node-12.22.1-linux-arm64.tar.gz
mkdir -p ${HOME}/.m2/repository/com/github/eirslett/yarn/1.22.5/
cp %{SOURCE14} ${HOME}/.m2/repository/com/github/eirslett/yarn/1.22.5/
mv ${HOME}/.m2/repository/com/github/eirslett/yarn/1.22.5/yarn-v1.22.5.tar.gz ${HOME}/.m2/repository/com/github/eirslett/yarn/1.22.5/yarn-1.22.5.tar.gz
tar -xzvf ${HOME}/.m2/repository/com/github/eirslett/yarn/1.22.5/yarn-1.22.5.tar.gz -C ${HOME}/.m2/repository/com/github/eirslett/yarn/1.22.5/
npm config set registry %{huawei_repo}/npm/
npm cache clean -f
${HOME}/repository/com/github/eirslett/yarn/1.22.5/yarn-v1.22.5/bin/yarn config set registry https://repo.huaweicloud.com/repository/npm/ -g
${HOME}/repository/com/github/eirslett/yarn/1.22.5/yarn-v1.22.5/bin/yarn config set ignore-engines true
${HOME}/.m2/repository/com/github/eirslett/yarn/1.22.5/yarn-v1.22.5/bin/yarn config set registry %{huawei_repo}/npm/ -g
${HOME}/.m2/repository/com/github/eirslett/yarn/1.22.5/yarn-v1.22.5/bin/yarn config set ignore-engines true
%pom_add_dep org.iq80.leveldb:leveldb-api:0.7 hadoop-hdfs-project/hadoop-hdfs
%pom_add_dep org.iq80.leveldb:leveldb-api:0.7 hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy
@@ -277,6 +298,9 @@ ${HOME}/repository/com/github/eirslett/yarn/1.22.5/yarn-v1.22.5/bin/yarn config
%pom_disable_module hadoop-azure hadoop-tools
%pom_disable_module hadoop-yarn-server-timelineservice-hbase-tests hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/pom.xml
%ifarch riscv64
%pom_disable_module hadoop-yarn-applications-catalog-webapp hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-catalog/pom.xml
%endif
# War files we don't want
%mvn_package :%{name}-auth-examples __noinstall
@@ -313,7 +337,12 @@ ${HOME}/repository/com/github/eirslett/yarn/1.22.5/yarn-v1.22.5/bin/yarn config
%mvn_file :%{name}-common::tests: %{name}/%{name}-common
%build
mvn clean -Dsnappy.lib=/usr/lib64 -Dbundle.snappy -Dcontainer-executor.conf.dir=%{_sysconfdir}/%{name} -Pdist,native -DskipTests -DskipIT -Dmaven.javadoc.skip=true package -s settings.xml
%ifarch riscv64
export MAVEN_OPTS="-Xms2048M -Xmx8000M"
function keepalive() { while true; do date; sleep 3600; done }
keepalive &
%endif
mvn clean -Dsnappy.lib=/usr/lib64 -Dbundle.snappy -Dcontainer-executor.conf.dir=%{_sysconfdir}/%{name} -Pdist,native -DskipTests -DskipIT -Dmaven.javadoc.skip=true package
%install
# Copy all jar files except those generated by the build
@@ -1133,6 +1162,23 @@ fi
%config(noreplace) %{_sysconfdir}/%{name}/container-executor.cfg
%changelog
* Fri Dec 13 2024 xuduo <fs-yao@126.com> - 3.3.6-6
- Add huawei repository
* Mon Nov 11 2024 xiaochn <xiaochuannan@inspur.com> - 3.3.6-5
- Remove npm repo in spec file and use unified configuration of the build environment
* Thu Sep 26 2024 Deyuan Fan <fandeyuan@kylinos.cn> - 3.3.6-4
- fix CVE-2024-23454
* Sun May 12 2024 Dingli Zhang <dingli@iscas.ac.cn> - 3.3.6-3
- Fix build on riscv64
- Upgrade os-maven-plugin to 1.7.1
- Remove settings.xml and use ${HOME}/.m2/repository as maven repo
* Fri Dec 15 2023 xiexing <xiexing4@hisilicon.com> - 3.3.6-2
- add conflicts to hadoop spec
* Mon Nov 27 2023 wenweijian <wenweijian2@huawei.com> - 3.3.6-1
- fix cve CVE-2023-26031

protoc-3.7.1-linux-riscv64.exe: Binary file not shown.

protoc-gen-grpc-java-1.26.0-linux-riscv_64.exe: Binary file not shown.

settings.xml View File

@@ -1,22 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<settings xmlns="http://maven.apache.org/SETTINGS/1.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/SETTINGS/1.0.0 http://maven.apache.org/xsd/settings-1.0.0.xsd">
<localRepository>@HOME@/repository/</localRepository>
<pluginGroups>
</pluginGroups>
<proxies>
</proxies>
<servers>
</servers>
<mirrors>
<mirror>
<id>huaweicloud</id>
<mirrorOf>*</mirrorOf>
<url>https://repo.huaweicloud.com/repository/maven/</url>
</mirror>
</mirrors>
<profiles>
</profiles>
</settings>