Compare commits


10 Commits

Author SHA1 Message Date
openeuler-ci-bot
e9b5ca7cbf
!63 [sync] PR-57: Fix the same-source heterogeneous-build issue in the source package
Merge pull request !63 from openeuler-sync-bot/sync-pr57-master-to-openEuler-24.03-LTS
2024-12-21 03:45:01 +00:00
沈仲伟
dbcb70e9e7 include all patches in the source package.
(cherry picked from commit 29fbc7c1d12de375b18ad36f21e9f5d95b7554e8)
2024-12-11 17:18:26 +08:00
openeuler-ci-bot
540d9a4a14
!60 [sync] PR-55: Build protoc, protoc-jar and protoc-jar-maven-plugin in prep state for riscv64
From: @openeuler-sync-bot 
Reviewed-by: @wenwj0 
Signed-off-by: @wenwj0
2024-12-11 08:58:17 +00:00
Dingli Zhang
5f99d862f4 Build protoc, protoc-jar and protoc-jar-maven-plugin in prep state for riscv64
(cherry picked from commit 2fedd6215f2f859c8558f778d02a59944a08bc6f)
2024-12-10 10:03:57 +08:00
openeuler-ci-bot
b1c6522ff2
!52 [sync] PR-51: Fix riscv64 support
From: @openeuler-sync-bot 
Reviewed-by: @wenwj0 
Signed-off-by: @wenwj0
2024-06-06 14:09:10 +00:00
Dingli Zhang
2c0ede0de3 Fix riscv64 support
(cherry picked from commit c321876b9507a8adc0f4d15ed2e12b58ed13584b)
2024-05-30 14:26:46 +08:00
openeuler-ci-bot
68f5380fd4
!50 [sync] PR-48: Update the npm download URL in the maven plugin
From: @openeuler-sync-bot 
Reviewed-by: @wenwj0 
Signed-off-by: @wenwj0
2024-05-01 01:51:30 +00:00
shaojiansong
3ff86f71f8 update npm download url for maven plugin
(cherry picked from commit 8bb7766578b4262786ce38c61e8adac626db88b0)
2024-04-29 16:36:01 +08:00
openeuler-ci-bot
c80ce77947
!46 Update version to 1.17.1
From: @xiexing01 
Reviewed-by: @macchen1 
Signed-off-by: @macchen1
2023-10-09 06:32:34 +00:00
xiexing
0053bfe408
update README.md.
Signed-off-by: xiexing <xiexing4@hisilicon.com>
2023-09-28 03:57:25 +00:00
9 changed files with 182 additions and 5 deletions

View File

@@ -0,0 +1,13 @@
--- flink-release-1.17.1/flink-runtime-web/pom.xml 2024-04-24 10:45:59.360106215 +0800
+++ flink-release-1.17.1-change/flink-runtime-web/pom.xml 2024-04-24 10:49:13.216106215 +0800
@@ -259,8 +259,9 @@
</goals>
<configuration>
<nodeVersion>v16.13.2</nodeVersion>
- <downloadRoot>https://repo.huaweicloud.com/nodejs/</downloadRoot>
+ <nodeDownloadRoot>https://repo.huaweicloud.com/nodejs/</nodeDownloadRoot>
<npmVersion>8.1.2</npmVersion>
+ <npmDownloadRoot>https://mirrors.huaweicloud.com/repository/npm/npm/-/</npmDownloadRoot>
</configuration>
</execution>
<execution>

View File

@@ -0,0 +1,13 @@
diff --git a/flink-formats/flink-parquet/pom.xml b/flink-formats/flink-parquet/pom.xml
index a95f6ce0..6085256d 100644
--- a/flink-formats/flink-parquet/pom.xml
+++ b/flink-formats/flink-parquet/pom.xml
@@ -251,7 +251,7 @@ under the License.
<extension>
<groupId>kr.motd.maven</groupId>
<artifactId>os-maven-plugin</artifactId>
- <version>1.7.0</version>
+ <version>1.7.1</version>
</extension>
</extensions>

View File

@@ -0,0 +1,23 @@
From 2c52801962444baa444401b8fafd09bd6cfab92b Mon Sep 17 00:00:00 2001
From: Dingli Zhang <dingli@iscas.ac.cn>
Date: Thu, 9 May 2024 00:02:35 +0800
Subject: [PATCH] Add support for riscv64
---
src/main/java/com/github/os72/protocjar/PlatformDetector.java | 3 +++
1 file changed, 3 insertions(+)
diff --git a/src/main/java/com/github/os72/protocjar/PlatformDetector.java b/src/main/java/com/github/os72/protocjar/PlatformDetector.java
index 7a99348..b8f08e5 100644
--- a/src/main/java/com/github/os72/protocjar/PlatformDetector.java
+++ b/src/main/java/com/github/os72/protocjar/PlatformDetector.java
@@ -211,6 +211,9 @@ private static String normalizeArch(String value) {
if ("ppc64le".equals(value)) {
return "ppcle_64";
}
+ if ("riscv64".equals(value)) {
+ return "riscv64";
+ }
if ("s390".equals(value)) {
return "s390_32";
}
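For context, this normalization is what lets protoc-jar map the JVM's reported architecture onto the classifier used for the protoc artifact; without the riscv64 entry the lookup falls through and no matching binary is found. The following is a minimal standalone sketch of that idea, not the patched class itself; the helper name and the reduced mapping table are illustrative.

import java.util.Map;

public class ArchNormalizeSketch {
    // Illustrative subset of the os.arch -> protoc classifier mapping;
    // the patch above adds the riscv64 entry so the lookup no longer falls through.
    private static final Map<String, String> ARCH_MAP = Map.of(
            "amd64", "x86_64",
            "aarch64", "aarch_64",
            "ppc64le", "ppcle_64",
            "riscv64", "riscv64",
            "s390", "s390_32");

    static String normalize(String osArch) {
        String arch = osArch.toLowerCase();
        return ARCH_MAP.getOrDefault(arch, arch);
    }

    public static void main(String[] args) {
        // On a riscv64 JVM, System.getProperty("os.arch") reports "riscv64".
        System.out.println(normalize(System.getProperty("os.arch")));
    }
}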

View File

@@ -0,0 +1,56 @@
From 8b626946fcabc659a41de3b15cbe6f5d45bd596e Mon Sep 17 00:00:00 2001
From: Dingli Zhang <dingli@iscas.ac.cn>
Date: Tue, 7 May 2024 13:56:04 +0800
Subject: [PATCH] Added support for building the riscv64 protoc binaries with
Kokoro
diff --git a/kokoro/release/protoc/linux/build.sh b/kokoro/release/protoc/linux/build.sh
index 44a95aab3..13be7b2ed 100755
--- a/kokoro/release/protoc/linux/build.sh
+++ b/kokoro/release/protoc/linux/build.sh
@@ -22,6 +22,9 @@ protoc-artifacts/build-protoc.sh linux aarch_64 protoc
sudo apt install -y g++-powerpc64le-linux-gnu
protoc-artifacts/build-protoc.sh linux ppcle_64 protoc
+sudo apt install -y g++-riscv64-linux-gnu
+protoc-artifacts/build-protoc.sh linux riscv64 protoc
+
sudo apt install -y g++-s390x-linux-gnu
protoc-artifacts/build-protoc.sh linux s390_64 protoc
diff --git a/protoc-artifacts/build-protoc.sh b/protoc-artifacts/build-protoc.sh
index 460284262..20ca05739 100755
--- a/protoc-artifacts/build-protoc.sh
+++ b/protoc-artifacts/build-protoc.sh
@@ -93,6 +93,8 @@ checkArch ()
assertEq $format "elf64-x86-64" $LINENO
elif [[ "$ARCH" == aarch_64 ]]; then
assertEq $format "elf64-little" $LINENO
+ elif [[ "$ARCH" == riscv64 ]]; then
+ assertEq $format "elf64-littleriscv" $LINENO
elif [[ "$ARCH" == s390_64 ]]; then
if [[ $host_machine == s390x ]];then
assertEq $format "elf64-s390" $LINENO
@@ -162,6 +164,9 @@ checkDependencies ()
elif [[ "$ARCH" == aarch_64 ]]; then
dump_cmd='objdump -p '"$1"' | grep NEEDED'
white_list="libpthread\.so\.0\|libm\.so\.6\|libc\.so\.6\|ld-linux-aarch64\.so\.1"
+ elif [[ "$ARCH" == riscv64 ]]; then
+ dump_cmd='objdump -p '"$1"' | grep NEEDED'
+ white_list="libz\.so\.1\|libm\.so\.6\|libc\.so\.6\|ld-linux-riscv64-lp64d\.so\.1"
fi
elif [[ "$OS" == osx ]]; then
dump_cmd='otool -L '"$1"' | fgrep dylib'
@@ -226,6 +231,8 @@ elif [[ "$(uname)" == Linux* ]]; then
elif [[ "$ARCH" == ppcle_64 ]]; then
CXXFLAGS="$CXXFLAGS -m64"
CONFIGURE_ARGS="$CONFIGURE_ARGS --host=powerpc64le-linux-gnu"
+ elif [[ "$ARCH" == riscv64 ]]; then
+ CONFIGURE_ARGS="$CONFIGURE_ARGS --host=riscv64-linux-gnu"
elif [[ "$ARCH" == s390_64 ]]; then
CXXFLAGS="$CXXFLAGS -m64"
CONFIGURE_ARGS="$CONFIGURE_ARGS --host=s390x-linux-gnu"
--
2.41.0
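Once a linux-riscv64 protoc artifact is installed into the local Maven repository (see the spec changes below), protoc-jar can resolve and run it like any other platform. The sketch below assumes protoc-jar's documented Protoc.runProtoc entry point and that the -v3.21.7 selector matches the locally installed artifact; both are assumptions for illustration, not taken from this change set.

import com.github.os72.protocjar.Protoc;

public class ProtocRunSketch {
    public static void main(String[] args) throws Exception {
        // protoc-jar picks a protoc binary for the detected platform classifier
        // (e.g. linux-riscv64 after the patches above) and runs it.
        // "-v3.21.7" selects the protoc version; the remaining args are normal protoc flags.
        int exit = Protoc.runProtoc(new String[] { "-v3.21.7", "--version" });
        System.out.println("protoc exited with " + exit);
    }
}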

View File

@@ -1,10 +1,22 @@
 # flink
 #### Introduction
-Stateful Computations over Data Streams
+Apache Flink is an open-source stream-processing framework developed by the Apache Software Foundation; its core is a distributed streaming dataflow engine written in Java and Scala. Flink is an open-source computing platform for distributed stream and batch data processing; on top of a single Flink runtime it supports both stream-processing and batch-processing applications.
+Flink executes arbitrary dataflow programs in a data-parallel and pipelined manner, and its pipelined runtime system can run both batch and stream programs. In addition, Flink's runtime natively supports the execution of iterative algorithms.
+Existing open-source computing solutions treat stream processing and batch processing as two different application types, because the SLAs (Service-Level Agreements) they provide are completely different: stream processing generally requires low latency and exactly-once guarantees, while batch processing requires high throughput and efficient processing.
 #### Software Architecture
 Software architecture description
+Flink's architecture likewise follows a layered design and is basically divided into three layers: the API & Libraries layer, the Runtime core layer, and the physical deployment layer.
+- API & Libraries layer: provides the interfaces that support stream and batch computation, and on top of them abstracts component libraries for different application types.
+- Runtime core layer: provides the basic services behind the upper-layer interfaces, i.e. execution of distributed Stream jobs, the JobGraph-to-ExecutionGraph mapping and transformation, task scheduling, and so on, turning DataStream and DataSet programs into unified, executable Task Operators.
+- Physical deployment layer: Flink supports multiple deployment modes: local, cluster (Standalone/YARN), cloud (GCE/EC2), and Kubernetes.
+#### ARM support
+1. Porting guide: https://www.hikunpeng.com/document/detail/zh/kunpengbds/ecosystemEnable/Flink/kunpengflink191_02_0001.html
+2. Deployment guide: https://www.hikunpeng.com/document/detail/zh/kunpengbds/ecosystemEnable/Flink/kunpengflink_04_0001.html
+3. Tuning guide: https://www.hikunpeng.com/document/detail/zh/kunpengbds/ecosystemEnable/Flink/kunpengflinkhdp_05_0002.html
 #### Installation
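To make the README's stream-and-batch-on-one-runtime claim concrete, here is a minimal DataStream job sketch; it assumes flink-streaming-java from the packaged 1.17.1 build is on the classpath, and the class name and job name are illustrative.

import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

public class StreamingSketch {
    public static void main(String[] args) throws Exception {
        // Local environment when run standalone; picks up the cluster context when submitted.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.fromElements("flink", "stream", "batch")
           .map(new MapFunction<String, Integer>() {
               @Override
               public Integer map(String value) {
                   return value.length();
               }
           })
           .print();
        env.execute("streaming-sketch");
    }
}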

View File

@@ -11,7 +11,7 @@
 Name: flink
 Version: 1.17.1
-Release: 1
+Release: 5
 Summary: Stateful Computations over Data Streams
 License: Apache License v2.0
 URL: https://github.com/apache/%{name}
@@ -19,20 +19,60 @@ Source0: https://github.com/apache/%{name}/archive/release-%{version}.tar
 Source2: https://packages.confluent.io/maven/io/confluent/kafka-schema-registry-client/7.2.2/kafka-schema-registry-client-7.2.2.jar
 Source3: https://packages.confluent.io/maven/io/confluent/kafka-avro-serializer/7.2.2/kafka-avro-serializer-7.2.2.jar
 Source4: https://packages.confluent.io/maven/io/confluent/kafka-schema-serializer/7.2.2/kafka-schema-serializer-7.2.2.jar
 Source5: https://packages.confluent.io/maven/org/apache/kafka/kafka-clients/7.2.2-ccs/kafka-clients-7.2.2-ccs.jar
 Source6: npm-8.1.2.tar.gz
+Source7: https://github.com/protocolbuffers/protobuf/archive/refs/tags/v3.21.7.tar.gz
+Source8: https://github.com/os72/protoc-jar/archive/refs/tags/v3.11.4.tar.gz
+Source9: https://github.com/os72/protoc-jar-maven-plugin/archive/refs/tags/v3.11.4.zip
 Patch0: 0001-add-npm.hw-repo.patch
 Patch1: 0002-fix-compilation-failure.patch
 Patch2: 0003-Skip-flink-avro-confluent-registry-test.patch
+Patch3: 0004-update-npm.hw-repo.patch
+Patch4: 0005-upgrade-os-maven-plugin-to-1.7.1.patch
+Patch1000: 1000-Add-protoc-java-support-for-riscv64.patch
+Patch1001: 1001-Added-support-for-building-the-riscv64-protoc-binari.patch
 BuildRequires: java-1.8.0-openjdk-devel maven npm
 Requires: java-1.8.0-openjdk
+%ifarch riscv64
+BuildRequires: autoconf automake libtool pkgconfig zlib-devel libstdc++-static gcc-c++
+%endif
 %description
 Apache Flink is a framework and distributed processing engine for stateful computations over unbounded and bounded data streams. Flink has been designed to run in all common cluster environments, perform computations at in-memory speed and at any scale.
 %prep
-%autosetup -p1 -n %{name}-release-%{version}
+%ifarch riscv64
+%setup -qn %{name}-release-%{version}
+%patch0 -p1
+%patch1 -p1
+%patch2 -p1
+%patch3 -p1
+%patch4 -p1
+mkdir -p ${HOME}/%{name}-prep_dir
+# protoc
+tar -mxf %{SOURCE7} -C ${HOME}/%{name}-prep_dir
+pushd ${HOME}/%{name}-prep_dir/protobuf-3.21.7
+%patch1001 -p1
+./autogen.sh
+./protoc-artifacts/build-protoc.sh linux riscv64 protoc
+mvn install:install-file -DgroupId=com.google.protobuf -DartifactId=protoc -Dversion=3.21.7 -Dclassifier=linux-riscv64 -Dpackaging=exe -Dfile=protoc-artifacts/target/linux/riscv64/protoc.exe
+popd
+# protoc-jar
+tar -mxf %{SOURCE8} -C ${HOME}/%{name}-prep_dir
+pushd ${HOME}/%{name}-prep_dir/protoc-jar-3.11.4
+%patch1000 -p1
+mvn clean install -Dmaven.test.skip=true -Dmaven.javadoc.skip=true
+popd
+# protoc-jar-maven-plugin
+unzip %{SOURCE9} -d ${HOME}/%{name}-prep_dir
+pushd ${HOME}/%{name}-prep_dir/protoc-jar-maven-plugin-3.11.4
+mvn clean install -Dmaven.test.skip=true -Dmaven.javadoc.skip=true
+popd
+%else
+%setup -n %{name}-release-%{version}
+%autopatch -M999 -p1
+%endif
 mvn install:install-file -DgroupId=io.confluent -DartifactId=kafka-schema-registry-client -Dversion=7.2.2 -Dpackaging=jar -Dfile=%{SOURCE2}
 mvn install:install-file -DgroupId=io.confluent -DartifactId=kafka-avro-serializer -Dversion=7.2.2 -Dpackaging=jar -Dfile=%{SOURCE3}
 mvn install:install-file -DgroupId=io.confluent -DartifactId=kafka-schema-serializer -Dversion=7.2.2 -Dpackaging=jar -Dfile=%{SOURCE4}
@@ -41,8 +81,14 @@ mkdir -p ${HOME}/.m2/repository/com/github/eirslett/npm/8.1.2/
 cp %{SOURCE6} ${HOME}/.m2/repository/com/github/eirslett/npm/8.1.2/
 %build
+%ifarch riscv64
+export MAVEN_OPTS="-Xms10240m -Xmx10240m -Xss2m -XX:MaxTenuringThreshold=0"
+export JAVA_TOOL_OPTIONS="-Xms10240m -Xmx10240m"
+mvn clean install -DskipTests -Dfast -T 2 -Pskip-webui-build
+%else
 npm config set registry https://repo.huaweicloud.com/repository/npm/
 mvn clean install -DskipTests -Dfast -T 2
+%endif
 %install
 mkdir -p %{buildroot}/opt/
@@ -56,6 +102,20 @@ find %{buildroot}/opt/apache-%{name}-%{version}/ -type f -name '*.py' | xargs -i
 %license LICENSE
 %changelog
+* Tue Dec 10 2024 shenzhongwei <shenzhongwei@kylinos.cn> - 1.17.1-5
+- include all patches in the source package.
+* Tue Jun 25 2024 Dingli Zhang <dingli@iscas.ac.cn> - 1.17.1-4
+- Remove riscv64 prebuilded files
+- Build protoc, protoc-jar and protoc-jar-maven-plugin in prep state for riscv64
+* Wed May 29 2024 Dingli Zhang <dingli@iscas.ac.cn> - 1.17.1-3
+- Fix riscv64 support
+- Upgrade os-maven-plugin to 1.7.1
+* Wed Apr 24 2024 shaojiansong <shaojiansong@kylinos.cn> - 1.17.1-2
+- update npm download url for maven plugin
 * Wed Sep 27 2023 xiexing <xiexing4@hisilicon.com> - 1.17.1-1
 - update version to 1.17.1

v3.11.4.tar.gz: new binary file (content not shown)
v3.11.4.zip: new binary file (content not shown)
v3.21.7.tar.gz: new binary file (content not shown)