Compare commits: aa5a2e8d15...e9b5ca7cbf (10 commits)
| SHA1 |
|---|
| e9b5ca7cbf |
| dbcb70e9e7 |
| 540d9a4a14 |
| 5f99d862f4 |
| b1c6522ff2 |
| 2c0ede0de3 |
| 68f5380fd4 |
| 3ff86f71f8 |
| c80ce77947 |
| 0053bfe408 |
0004-update-npm.hw-repo.patch (new file, 13 lines)
@@ -0,0 +1,13 @@
--- flink-release-1.17.1/flink-runtime-web/pom.xml 2024-04-24 10:45:59.360106215 +0800
+++ flink-release-1.17.1-change/flink-runtime-web/pom.xml 2024-04-24 10:49:13.216106215 +0800
@@ -259,8 +259,9 @@
</goals>
<configuration>
<nodeVersion>v16.13.2</nodeVersion>
- <downloadRoot>https://repo.huaweicloud.com/nodejs/</downloadRoot>
+ <nodeDownloadRoot>https://repo.huaweicloud.com/nodejs/</nodeDownloadRoot>
<npmVersion>8.1.2</npmVersion>
+ <npmDownloadRoot>https://mirrors.huaweicloud.com/repository/npm/npm/-/</npmDownloadRoot>
</configuration>
</execution>
<execution>
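The patch above points flink-runtime-web's frontend-maven-plugin configuration (the com.github.eirslett plugin the spec below also pre-seeds with npm-8.1.2.tar.gz) at explicit node and npm mirrors via `nodeDownloadRoot`/`npmDownloadRoot` instead of the older combined `downloadRoot`. A quick sanity check that the mirrors serve the pinned versions, assuming they follow the standard nodejs.org and npm-registry path layout (the exact artifact names below are illustrative, not taken from the patch):

```sh
# Check that the node tarball and npm package tarball resolve on the mirrors
# (paths are assumptions based on the usual nodejs.org / npm registry layout).
curl -fsI https://repo.huaweicloud.com/nodejs/v16.13.2/node-v16.13.2-linux-x64.tar.gz | head -n1
curl -fsI https://mirrors.huaweicloud.com/repository/npm/npm/-/npm-8.1.2.tgz | head -n1
```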
0005-upgrade-os-maven-plugin-to-1.7.1.patch (new file, 13 lines)
@@ -0,0 +1,13 @@
diff --git a/flink-formats/flink-parquet/pom.xml b/flink-formats/flink-parquet/pom.xml
index a95f6ce0..6085256d 100644
--- a/flink-formats/flink-parquet/pom.xml
+++ b/flink-formats/flink-parquet/pom.xml
@@ -251,7 +251,7 @@ under the License.
<extension>
<groupId>kr.motd.maven</groupId>
<artifactId>os-maven-plugin</artifactId>
- <version>1.7.0</version>
+ <version>1.7.1</version>
</extension>
</extensions>
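The os-maven-plugin extension resolves the `os.detected.classifier` property used to select a platform-specific protoc artifact; the bump to 1.7.1 belongs to the riscv64 enablement in this series (see the 1.17.1-3 changelog entry below). A quick way to see which classifier the extension resolves on the build host, as a sketch that assumes the maven-help-plugin 3.x `-DforceStdout` behaviour and is run inside a module that registers the extension (e.g. flink-formats/flink-parquet):

```sh
# Prints e.g. "linux-x86_64", or "linux-riscv64" on a riscv64 host.
mvn -q help:evaluate -Dexpression=os.detected.classifier -DforceStdout
```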
1000-Add-protoc-java-support-for-riscv64.patch (new file, 23 lines)
@@ -0,0 +1,23 @@
From 2c52801962444baa444401b8fafd09bd6cfab92b Mon Sep 17 00:00:00 2001
From: Dingli Zhang <dingli@iscas.ac.cn>
Date: Thu, 9 May 2024 00:02:35 +0800
Subject: [PATCH] Add support for riscv64

---
 src/main/java/com/github/os72/protocjar/PlatformDetector.java | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/src/main/java/com/github/os72/protocjar/PlatformDetector.java b/src/main/java/com/github/os72/protocjar/PlatformDetector.java
index 7a99348..b8f08e5 100644
--- a/src/main/java/com/github/os72/protocjar/PlatformDetector.java
+++ b/src/main/java/com/github/os72/protocjar/PlatformDetector.java
@@ -211,6 +211,9 @@ private static String normalizeArch(String value) {
 if ("ppc64le".equals(value)) {
 return "ppcle_64";
 }
+ if ("riscv64".equals(value)) {
+ return "riscv64";
+ }
 if ("s390".equals(value)) {
 return "s390_32";
 }
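Patch 1000 teaches protoc-jar's PlatformDetector to pass `riscv64` through unchanged instead of leaving it unrecognized; like os-maven-plugin, the detector presumably normalizes the JVM's `os.arch` value. To see what the JVM reports on the build host (standard JVM diagnostic flag, shown only as a sketch):

```sh
# On a riscv64 host this prints "os.arch = riscv64", the value the patched
# normalizeArch() now returns unchanged.
java -XshowSettings:properties -version 2>&1 | grep 'os.arch'
```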
1001-Added-support-for-building-the-riscv64-protoc-binari.patch (new file, 56 lines)
@@ -0,0 +1,56 @@
From 8b626946fcabc659a41de3b15cbe6f5d45bd596e Mon Sep 17 00:00:00 2001
From: Dingli Zhang <dingli@iscas.ac.cn>
Date: Tue, 7 May 2024 13:56:04 +0800
Subject: [PATCH] Added support for building the riscv64 protoc binaries with
 Kokoro


diff --git a/kokoro/release/protoc/linux/build.sh b/kokoro/release/protoc/linux/build.sh
index 44a95aab3..13be7b2ed 100755
--- a/kokoro/release/protoc/linux/build.sh
+++ b/kokoro/release/protoc/linux/build.sh
@@ -22,6 +22,9 @@ protoc-artifacts/build-protoc.sh linux aarch_64 protoc
sudo apt install -y g++-powerpc64le-linux-gnu
protoc-artifacts/build-protoc.sh linux ppcle_64 protoc

+sudo apt install -y g++-riscv64-linux-gnu
+protoc-artifacts/build-protoc.sh linux riscv64 protoc
+
sudo apt install -y g++-s390x-linux-gnu
protoc-artifacts/build-protoc.sh linux s390_64 protoc

diff --git a/protoc-artifacts/build-protoc.sh b/protoc-artifacts/build-protoc.sh
index 460284262..20ca05739 100755
--- a/protoc-artifacts/build-protoc.sh
+++ b/protoc-artifacts/build-protoc.sh
@@ -93,6 +93,8 @@ checkArch ()
assertEq $format "elf64-x86-64" $LINENO
elif [[ "$ARCH" == aarch_64 ]]; then
assertEq $format "elf64-little" $LINENO
+ elif [[ "$ARCH" == riscv64 ]]; then
+ assertEq $format "elf64-littleriscv" $LINENO
elif [[ "$ARCH" == s390_64 ]]; then
if [[ $host_machine == s390x ]];then
assertEq $format "elf64-s390" $LINENO
@@ -162,6 +164,9 @@ checkDependencies ()
elif [[ "$ARCH" == aarch_64 ]]; then
dump_cmd='objdump -p '"$1"' | grep NEEDED'
white_list="libpthread\.so\.0\|libm\.so\.6\|libc\.so\.6\|ld-linux-aarch64\.so\.1"
+ elif [[ "$ARCH" == riscv64 ]]; then
+ dump_cmd='objdump -p '"$1"' | grep NEEDED'
+ white_list="libz\.so\.1\|libm\.so\.6\|libc\.so\.6\|ld-linux-riscv64-lp64d\.so\.1"
fi
elif [[ "$OS" == osx ]]; then
dump_cmd='otool -L '"$1"' | fgrep dylib'
@@ -226,6 +231,8 @@ elif [[ "$(uname)" == Linux* ]]; then
elif [[ "$ARCH" == ppcle_64 ]]; then
CXXFLAGS="$CXXFLAGS -m64"
CONFIGURE_ARGS="$CONFIGURE_ARGS --host=powerpc64le-linux-gnu"
+ elif [[ "$ARCH" == riscv64 ]]; then
+ CONFIGURE_ARGS="$CONFIGURE_ARGS --host=riscv64-linux-gnu"
elif [[ "$ARCH" == s390_64 ]]; then
CXXFLAGS="$CXXFLAGS -m64"
CONFIGURE_ARGS="$CONFIGURE_ARGS --host=s390x-linux-gnu"
--
2.41.0
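Patch 1001 wires riscv64 into protobuf's build-protoc.sh: a riscv64-linux-gnu cross compile, an `elf64-littleriscv` format assertion in checkArch, and a NEEDED-library whitelist for checkDependencies. After the script runs (the flink.spec %prep below invokes it as `./protoc-artifacts/build-protoc.sh linux riscv64 protoc`), the same checks can be repeated by hand; a brief sketch using the output path referenced later in the spec:

```sh
# Confirm the cross-built binary has the ELF format checkArch asserts,
# and that its NEEDED libraries stay within the whitelist added by the patch.
objdump -f protoc-artifacts/target/linux/riscv64/protoc.exe | grep 'file format'   # expect elf64-littleriscv
objdump -p protoc-artifacts/target/linux/riscv64/protoc.exe | grep NEEDED
```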
README.md (14 lines changed)
@@ -1,10 +1,22 @@
# flink

#### Introduction
Stateful Computations over Data Streams
Apache Flink is an open-source stream-processing framework developed by the Apache Software Foundation; its core is a distributed streaming dataflow engine written in Java and Scala. Flink is an open-source computing platform for distributed stream and batch data processing: on top of a single Flink runtime it supports both stream-processing and batch-processing applications.
Flink executes arbitrary dataflow programs in a data-parallel and pipelined manner; its pipelined runtime can execute both batch and stream programs, and the runtime itself also supports the execution of iterative algorithms.
Existing open-source computing solutions treat stream processing and batch processing as two different application types because the SLAs (Service-Level Agreements) they provide are completely different: stream processing generally requires low latency and exactly-once guarantees, while batch processing requires high throughput and efficient processing.

#### Software architecture
Software architecture description
Flink likewise follows a layered architecture design, roughly divided into three layers: the API & Libraries layer, the Runtime core layer, and the physical deployment layer.
- API & Libraries layer: provides the interfaces supporting stream and batch computation, and builds component libraries for different application types on top of them.
- Runtime core layer: provides the underlying services for the upper-layer interfaces, supporting distributed Stream job execution, the JobGraph-to-ExecutionGraph mapping, and task scheduling, and turning DataStream and DataSet programs into a unified set of executable Task operators.
- Physical deployment layer: Flink supports multiple deployment modes: local, cluster (Standalone/YARN), cloud (GCE/EC2), and Kubernetes.

#### ARM support:

1. Porting guide: https://www.hikunpeng.com/document/detail/zh/kunpengbds/ecosystemEnable/Flink/kunpengflink191_02_0001.html
2. Deployment guide: https://www.hikunpeng.com/document/detail/zh/kunpengbds/ecosystemEnable/Flink/kunpengflink_04_0001.html
3. Tuning guide: https://www.hikunpeng.com/document/detail/zh/kunpengbds/ecosystemEnable/Flink/kunpengflinkhdp_05_0002.html


#### Installation guide

flink.spec (64 lines changed)
@@ -11,7 +11,7 @@

Name: flink
Version: 1.17.1
-Release: 1
+Release: 5
Summary: Stateful Computations over Data Streams
License: Apache License v2.0
URL: https://github.com/apache/%{name}
@@ -21,18 +21,58 @@ Source3: https://packages.confluent.io/maven/io/confluent/kafka-avro-seri
Source4: https://packages.confluent.io/maven/io/confluent/kafka-schema-serializer/7.2.2/kafka-schema-serializer-7.2.2.jar
Source5: https://packages.confluent.io/maven/org/apache/kafka/kafka-clients/7.2.2-ccs/kafka-clients-7.2.2-ccs.jar
Source6: npm-8.1.2.tar.gz
Source7: https://github.com/protocolbuffers/protobuf/archive/refs/tags/v3.21.7.tar.gz
Source8: https://github.com/os72/protoc-jar/archive/refs/tags/v3.11.4.tar.gz
Source9: https://github.com/os72/protoc-jar-maven-plugin/archive/refs/tags/v3.11.4.zip

Patch0: 0001-add-npm.hw-repo.patch
Patch1: 0002-fix-compilation-failure.patch
Patch2: 0003-Skip-flink-avro-confluent-registry-test.patch
Patch3: 0004-update-npm.hw-repo.patch
Patch4: 0005-upgrade-os-maven-plugin-to-1.7.1.patch
Patch1000: 1000-Add-protoc-java-support-for-riscv64.patch
Patch1001: 1001-Added-support-for-building-the-riscv64-protoc-binari.patch
BuildRequires: java-1.8.0-openjdk-devel maven npm
Requires: java-1.8.0-openjdk
%ifarch riscv64
BuildRequires: autoconf automake libtool pkgconfig zlib-devel libstdc++-static gcc-c++
%endif

%description
Apache Flink is a framework and distributed processing engine for stateful computations over unbounded and bounded data streams. Flink has been designed to run in all common cluster environments, perform computations at in-memory speed and at any scale.

%prep
%autosetup -p1 -n %{name}-release-%{version}
%ifarch riscv64
%setup -qn %{name}-release-%{version}
%patch0 -p1
%patch1 -p1
%patch2 -p1
%patch3 -p1
%patch4 -p1
mkdir -p ${HOME}/%{name}-prep_dir
# protoc
tar -mxf %{SOURCE7} -C ${HOME}/%{name}-prep_dir
pushd ${HOME}/%{name}-prep_dir/protobuf-3.21.7
%patch1001 -p1
./autogen.sh
./protoc-artifacts/build-protoc.sh linux riscv64 protoc
mvn install:install-file -DgroupId=com.google.protobuf -DartifactId=protoc -Dversion=3.21.7 -Dclassifier=linux-riscv64 -Dpackaging=exe -Dfile=protoc-artifacts/target/linux/riscv64/protoc.exe
popd
# protoc-jar
tar -mxf %{SOURCE8} -C ${HOME}/%{name}-prep_dir
pushd ${HOME}/%{name}-prep_dir/protoc-jar-3.11.4
%patch1000 -p1
mvn clean install -Dmaven.test.skip=true -Dmaven.javadoc.skip=true
popd
# protoc-jar-maven-plugin
unzip %{SOURCE9} -d ${HOME}/%{name}-prep_dir
pushd ${HOME}/%{name}-prep_dir/protoc-jar-maven-plugin-3.11.4
mvn clean install -Dmaven.test.skip=true -Dmaven.javadoc.skip=true
popd
%else
%setup -n %{name}-release-%{version}
%autopatch -M999 -p1
%endif
mvn install:install-file -DgroupId=io.confluent -DartifactId=kafka-schema-registry-client -Dversion=7.2.2 -Dpackaging=jar -Dfile=%{SOURCE2}
mvn install:install-file -DgroupId=io.confluent -DartifactId=kafka-avro-serializer -Dversion=7.2.2 -Dpackaging=jar -Dfile=%{SOURCE3}
mvn install:install-file -DgroupId=io.confluent -DartifactId=kafka-schema-serializer -Dversion=7.2.2 -Dpackaging=jar -Dfile=%{SOURCE4}
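On riscv64 the %prep stage above bootstraps protoc 3.21.7 and protoc-jar into the builder's local Maven repository before Flink itself is built. A quick check that the hand-installed protoc artifact landed under the coordinates the later build will look up, as a sketch assuming the default ~/.m2 repository layout:

```sh
# install:install-file with classifier=linux-riscv64 and packaging=exe should produce:
#   ~/.m2/repository/com/google/protobuf/protoc/3.21.7/protoc-3.21.7-linux-riscv64.exe
ls ~/.m2/repository/com/google/protobuf/protoc/3.21.7/
```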
@@ -41,8 +81,14 @@ mkdir -p ${HOME}/.m2/repository/com/github/eirslett/npm/8.1.2/
cp %{SOURCE6} ${HOME}/.m2/repository/com/github/eirslett/npm/8.1.2/

%build
%ifarch riscv64
export MAVEN_OPTS="-Xms10240m -Xmx10240m -Xss2m -XX:MaxTenuringThreshold=0"
export JAVA_TOOL_OPTIONS="-Xms10240m -Xmx10240m"
mvn clean install -DskipTests -Dfast -T 2 -Pskip-webui-build
%else
npm config set registry https://repo.huaweicloud.com/repository/npm/
mvn clean install -DskipTests -Dfast -T 2
%endif

%install
mkdir -p %{buildroot}/opt/
@@ -56,6 +102,20 @@ find %{buildroot}/opt/apache-%{name}-%{version}/ -type f -name '*.py' | xargs -i
%license LICENSE

%changelog
* Tue Dec 10 2024 shenzhongwei <shenzhongwei@kylinos.cn> - 1.17.1-5
- include all patches in the source package.

* Tue Jun 25 2024 Dingli Zhang <dingli@iscas.ac.cn> - 1.17.1-4
- Remove riscv64 prebuilt files
- Build protoc, protoc-jar and protoc-jar-maven-plugin in the prep stage for riscv64

* Wed May 29 2024 Dingli Zhang <dingli@iscas.ac.cn> - 1.17.1-3
- Fix riscv64 support
- Upgrade os-maven-plugin to 1.7.1

* Wed Apr 24 2024 shaojiansong <shaojiansong@kylinos.cn> - 1.17.1-2
- update npm download url for maven plugin

* Wed Sep 27 2023 xiexing <xiexing4@hisilicon.com> - 1.17.1-1
- update version to 1.17.1

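For reference, a rough way to rebuild the package locally against this spec; this is a sketch rather than part of the change, and it assumes the sources and patches listed above are already in the rpmbuild SOURCES directory:

```sh
# Pull in the declared build dependencies, then build binary and source RPMs.
dnf builddep flink.spec    # requires dnf-plugins-core
rpmbuild -ba flink.spec
```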
v3.11.4.tar.gz (new binary file, not shown)
v3.11.4.zip (new binary file, not shown)
v3.21.7.tar.gz (new binary file, not shown)