flink/flink.spec
%define __jar_repack %{nil}
%global debug_package %{nil}
# Use local caches for compilation
%global compile_for_local 0
%global with_debug 0
%global with_tests 0

Name: flink
Version: 1.17.1
Release: 4
Summary: Stateful Computations over Data Streams
License: Apache License v2.0
URL: https://github.com/apache/%{name}
Source0: https://github.com/apache/%{name}/archive/release-%{version}.tar.gz
Source2: https://packages.confluent.io/maven/io/confluent/kafka-schema-registry-client/7.2.2/kafka-schema-registry-client-7.2.2.jar
Source3: https://packages.confluent.io/maven/io/confluent/kafka-avro-serializer/7.2.2/kafka-avro-serializer-7.2.2.jar
Source4: https://packages.confluent.io/maven/io/confluent/kafka-schema-serializer/7.2.2/kafka-schema-serializer-7.2.2.jar
Source5: https://packages.confluent.io/maven/org/apache/kafka/kafka-clients/7.2.2-ccs/kafka-clients-7.2.2-ccs.jar
Source6: npm-8.1.2.tar.gz
Source7: https://github.com/protocolbuffers/protobuf/archive/refs/tags/v3.21.7.tar.gz
Source8: https://github.com/os72/protoc-jar/archive/refs/tags/v3.11.4.tar.gz
Source9: https://github.com/os72/protoc-jar-maven-plugin/archive/refs/tags/v3.11.4.zip
Patch0: 0001-add-npm.hw-repo.patch
Patch1: 0002-fix-compilation-failure.patch
Patch2: 0003-Skip-flink-avro-confluent-registry-test.patch
Patch3: 0004-update-npm.hw-repo.patch
Patch4: 0005-upgrade-os-maven-plugin-to-1.7.1.patch
BuildRequires: java-1.8.0-openjdk-devel maven npm
Requires: java-1.8.0-openjdk
%ifarch riscv64
BuildRequires: autoconf automake libtool pkgconfig zlib-devel libstdc++-static gcc-c++
Patch1000: 1000-Add-protoc-java-support-for-riscv64.patch
Patch1001: 1001-Added-support-for-building-the-riscv64-protoc-binari.patch
%endif

%description
Apache Flink is a framework and distributed processing engine for stateful computations over unbounded and bounded data streams. Flink has been designed to run in all common cluster environments, perform computations at in-memory speed and at any scale.

%prep
%ifarch riscv64
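# No prebuilt linux-riscv64 protoc artifact is published upstream, so protoc,
# protoc-jar and protoc-jar-maven-plugin are built from source here and
# installed into the local Maven repository before the Flink build.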
%setup -qn %{name}-release-%{version}
%patch0 -p1
%patch1 -p1
%patch2 -p1
%patch3 -p1
%patch4 -p1
mkdir -p ${HOME}/%{name}-prep_dir
# protoc
tar -mxf %{SOURCE7} -C ${HOME}/%{name}-prep_dir
pushd ${HOME}/%{name}-prep_dir/protobuf-3.21.7
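# Patch1001: add support for building the riscv64 protoc binary.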
%patch1001 -p1
./autogen.sh
./protoc-artifacts/build-protoc.sh linux riscv64 protoc
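# Register the freshly built protoc in the local Maven repository as
# com.google.protobuf:protoc:3.21.7 with the linux-riscv64 classifier.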
mvn install:install-file -DgroupId=com.google.protobuf -DartifactId=protoc -Dversion=3.21.7 -Dclassifier=linux-riscv64 -Dpackaging=exe -Dfile=protoc-artifacts/target/linux/riscv64/protoc.exe
popd
# protoc-jar
tar -mxf %{SOURCE8} -C ${HOME}/%{name}-prep_dir
pushd ${HOME}/%{name}-prep_dir/protoc-jar-3.11.4
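# Patch1000: add riscv64 support to protoc-jar.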
%patch1000 -p1
mvn clean install -Dmaven.test.skip=true -Dmaven.javadoc.skip=true
popd
# protoc-jar-maven-plugin
unzip %{SOURCE9} -d ${HOME}/%{name}-prep_dir
pushd ${HOME}/%{name}-prep_dir/protoc-jar-maven-plugin-3.11.4
mvn clean install -Dmaven.test.skip=true -Dmaven.javadoc.skip=true
popd
%else
%autosetup -p1 -n %{name}-release-%{version}
%endif
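# Pre-install the Confluent/Kafka jars shipped as Source2-Source5 into the local
# Maven repository so Maven resolves them locally instead of downloading them
# from packages.confluent.io.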
mvn install:install-file -DgroupId=io.confluent -DartifactId=kafka-schema-registry-client -Dversion=7.2.2 -Dpackaging=jar -Dfile=%{SOURCE2}
mvn install:install-file -DgroupId=io.confluent -DartifactId=kafka-avro-serializer -Dversion=7.2.2 -Dpackaging=jar -Dfile=%{SOURCE3}
mvn install:install-file -DgroupId=io.confluent -DartifactId=kafka-schema-serializer -Dversion=7.2.2 -Dpackaging=jar -Dfile=%{SOURCE4}
mvn install:install-file -DgroupId=org.apache.kafka -DartifactId=kafka-clients -Dversion=7.2.2-ccs -Dpackaging=jar -Dfile=%{SOURCE5}
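# Drop the bundled npm tarball into the path where the frontend-maven-plugin
# (com.github.eirslett) caches its npm download, so the WebUI build does not
# fetch npm from the network.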
mkdir -p ${HOME}/.m2/repository/com/github/eirslett/npm/8.1.2/
cp %{SOURCE6} ${HOME}/.m2/repository/com/github/eirslett/npm/8.1.2/

%build
%ifarch riscv64
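# Raise the JVM heap and thread-stack limits for Maven and forked Java tools.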
export MAVEN_OPTS="-Xms10240m -Xmx10240m -Xss2m -XX:MaxTenuringThreshold=0"
export JAVA_TOOL_OPTIONS="-Xms10240m -Xmx10240m"
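# Build without tests; skip-webui-build skips the npm-based WebUI build
# (a working Node.js/npm toolchain for riscv64 is assumed to be unavailable).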
mvn clean install -DskipTests -Dfast -T 2 -Pskip-webui-build
%else
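# Non-riscv64 builds include the WebUI; point npm at the Huawei Cloud mirror.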
npm config set registry https://repo.huaweicloud.com/repository/npm/
mvn clean install -DskipTests -Dfast -T 2
%endif

%install
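# Copy the binary distribution assembled by flink-dist into the buildroot
# under /opt.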
mkdir -p %{buildroot}/opt/
cp -rf ../%{name}-release-%{version}/flink-dist/target/%{name}-%{version}-bin/%{name}-%{version} %{buildroot}/opt/apache-%{name}-%{version}
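# Rewrite '#!/usr/bin/env python' shebangs in the shipped Python scripts to
# /usr/bin/python3.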
find %{buildroot}/opt/apache-%{name}-%{version}/ -type f -name '*.py' | xargs -i sed -i 's/\#!\/usr\/bin\/env python$/\#!\/usr\/bin\/python3/' {}

%files
/opt/apache-%{name}-%{version}
%doc README.md
%license LICENSE

%changelog
* Tue Jun 25 2024 Dingli Zhang <dingli@iscas.ac.cn> - 1.17.1-4
- Remove riscv64 prebuilt files
- Build protoc, protoc-jar and protoc-jar-maven-plugin in the prep stage for riscv64
* Wed May 29 2024 Dingli Zhang <dingli@iscas.ac.cn> - 1.17.1-3
- Fix riscv64 support
- Upgrade os-maven-plugin to 1.7.1
* Wed Apr 24 2024 shaojiansong <shaojiansong@kylinos.cn> - 1.17.1-2
- update npm download url for maven plugin
* Wed Sep 27 2023 xiexing <xiexing4@hisilicon.com> - 1.17.1-1
- update version to 1.17.1
* Mon May 8 2023 Wenlong Zhang <zhangwenlong@loongson.cn> - 1.13.0-2
- add loongarch64 support
* Wed Dec 22 2021 chenjunbiao <chenjunbiao@uniontech.com> - 1.13.0-1
- Update version.
* Fri Dec 17 2021 weidong <weidong@uniontech.com> - 1.12.7-1
- Update version.
* Wed Jul 21 2021 zhangjiapeng <zhangjiapeng@huawei.com> - 1.12.0-4
- Fix compilation failure.
* Wed Dec 23 2020 weidong <weidong@uniontech.com> - 1.12.0-3
- Add npm.huawei repo.
* Thu Dec 17 2020 weidong <weidong@uniontech.com> - 1.12.0-2
- Fix compilation issues.
* Mon Dec 14 2020 weidong <weidong@uniontech.com> - 1.12.0-1
- Initial package.