Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
18 commits
Select commit Hold shift + click to select a range
8e76626
HDDS-10459. Bump snappy-java to 1.1.10.5 (#6324)
vtutrinov Mar 4, 2024
51c4839
HDDS-10423. Datanode fails to start with invalid checksum size settin…
adoroszlai Feb 27, 2024
499dc92
HDDS-10428. OzoneClientConfig#validate does not get called (#6282)
ChenSammi Feb 27, 2024
599637e
HDDS-10432. Hadoop FS client write(byte[], int, int) is very slow in …
duongkame Feb 27, 2024
8a17c57
HDDS-9235. ReplicationManager metrics not collected after restart. (#…
aswinshakil Feb 28, 2024
ec45af5
HDDS-10324. Metadata are not updated when keys are overwritten. (#6273)
ArafatKhan2198 Feb 28, 2024
b8363fd
HDDS-10191. Fix some mismatches in LICENSE
adoroszlai Mar 5, 2024
3b31e73
HDDS-10269. Remove duplicate addCacheEntry in OMDirectoryCreateReques…
ivandika3 Feb 1, 2024
c15637d
HDDS-10327. S3G does not work in a single-node deployment (#6257)
Tejaskriya Feb 27, 2024
21edae0
HDDS-10214. Update supported versions in security policy up to 1.4.0 …
ivandika3 Feb 25, 2024
f8e41ae
HDDS-10365. Fix description for `ozone getconf ozonemanagers` (#6263)
david1859168 Feb 25, 2024
fa7ac21
HDDS-10399. IndexOutOfBoundsException when shallow listing empty dire…
SaketaChalamchala Feb 25, 2024
09b912a
HDDS-10405. ozone admin has hard-coded info loglevel (#6254)
adoroszlai Feb 23, 2024
237d206
HDDS-10359. Recursively deleting volume with OBS bucket shows error d…
ashishkumar50 Feb 14, 2024
c7fe95d
HDDS-10000. Improve LeakDetector (#5916)
adoroszlai Jan 4, 2024
ce17b99
HDDS-10134. Avoid false positive ManagedObject leak report (#6013)
adoroszlai Jan 19, 2024
fc7ae0f
HDDS-10333. RocksDB logger not closed (#6200)
adoroszlai Feb 9, 2024
196462a
HDDS-10470. Populate Maven dependency cache in separate workflow (#6340)
adoroszlai Mar 8, 2024
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
16 changes: 8 additions & 8 deletions .github/workflows/ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -92,10 +92,10 @@ jobs:
restore-keys: |
${{ runner.os }}-pnpm-
- name: Cache for maven dependencies
uses: actions/cache@v4
uses: actions/cache/restore@v4
with:
path: |
~/.m2/repository
~/.m2/repository/*/*/*
!~/.m2/repository/org/apache/ozone
key: maven-repo-${{ hashFiles('**/pom.xml') }}
restore-keys: |
Expand Down Expand Up @@ -160,7 +160,7 @@ jobs:
uses: actions/cache/restore@v4
with:
path: |
~/.m2/repository
~/.m2/repository/*/*/*
!~/.m2/repository/org/apache/ozone
key: maven-repo-${{ hashFiles('**/pom.xml') }}
restore-keys: |
Expand Down Expand Up @@ -198,7 +198,7 @@ jobs:
uses: actions/cache/restore@v4
with:
path: |
~/.m2/repository
~/.m2/repository/*/*/*
!~/.m2/repository/org/apache/ozone
key: maven-repo-${{ hashFiles('**/pom.xml') }}
restore-keys: |
Expand Down Expand Up @@ -242,7 +242,7 @@ jobs:
uses: actions/cache/restore@v4
with:
path: |
~/.m2/repository
~/.m2/repository/*/*/*
!~/.m2/repository/org/apache/ozone
key: maven-repo-${{ hashFiles('**/pom.xml') }}
restore-keys: |
Expand Down Expand Up @@ -310,7 +310,7 @@ jobs:
uses: actions/cache/restore@v4
with:
path: |
~/.m2/repository
~/.m2/repository/*/*/*
!~/.m2/repository/org/apache/ozone
key: maven-repo-${{ hashFiles('**/pom.xml') }}
restore-keys: |
Expand Down Expand Up @@ -446,7 +446,7 @@ jobs:
uses: actions/cache/restore@v4
with:
path: |
~/.m2/repository
~/.m2/repository/*/*/*
!~/.m2/repository/org/apache/ozone
key: maven-repo-${{ hashFiles('**/pom.xml') }}
restore-keys: |
Expand Down Expand Up @@ -506,7 +506,7 @@ jobs:
uses: actions/cache/restore@v4
with:
path: |
~/.m2/repository
~/.m2/repository/*/*/*
!~/.m2/repository/org/apache/ozone
key: maven-repo-${{ hashFiles('**/pom.xml') }}
restore-keys: |
Expand Down
10 changes: 5 additions & 5 deletions .github/workflows/intermittent-test-check.yml
Original file line number Diff line number Diff line change
Expand Up @@ -105,13 +105,13 @@ jobs:
with:
ref: ${{ github.event.inputs.ref }}
- name: Cache for maven dependencies
uses: actions/cache@v4
uses: actions/cache/restore@v4
with:
path: ~/.m2/repository
key: maven-repo-${{ hashFiles('**/pom.xml') }}-8-single
path: |
~/.m2/repository/*/*/*
!~/.m2/repository/org/apache/ozone
key: maven-repo-${{ hashFiles('**/pom.xml') }}
restore-keys: |
maven-repo-${{ hashFiles('**/pom.xml') }}-8
maven-repo-${{ hashFiles('**/pom.xml') }}
maven-repo-
- name: Setup java
uses: actions/setup-java@v4
Expand Down
74 changes: 74 additions & 0 deletions .github/workflows/populate-cache.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,74 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# This workflow creates cache with Maven dependencies for Ozone build.

name: populate-cache

on:
  push:
    branches:
      - master
      - ozone-1.4
    paths:
      - 'pom.xml'
      - '**/pom.xml'
      - '.github/workflows/populate-cache.yml'
  schedule:
    - cron: '20 3 * * *'

jobs:
  build:
    runs-on: ubuntu-20.04
    steps:
      - name: Checkout project
        uses: actions/checkout@v4

      # Only repopulate when no cache exists for the current set of pom.xml files.
      - name: Restore cache for Maven dependencies
        id: restore-cache
        uses: actions/cache/restore@v4
        with:
          path: |
            ~/.m2/repository/*/*/*
            !~/.m2/repository/org/apache/ozone
          key: maven-repo-${{ hashFiles('**/pom.xml') }}

      - name: Setup Java
        if: steps.restore-cache.outputs.cache-hit != 'true'
        uses: actions/setup-java@v4
        with:
          distribution: 'temurin'
          java-version: 8

      - name: Fetch dependencies
        if: steps.restore-cache.outputs.cache-hit != 'true'
        run: mvn --batch-mode --fail-never --no-transfer-progress --show-version dependency:go-offline

      # Locally-built Ozone artifacts must never be cached (excluded from the
      # cache path above as well).
      - name: Delete Ozone jars from repo
        if: steps.restore-cache.outputs.cache-hit != 'true'
        run: rm -fr ~/.m2/repository/org/apache/ozone

      - name: List repo contents
        if: steps.restore-cache.outputs.cache-hit != 'true'
        run: find ~/.m2/repository -type f | sort | xargs ls -lh

      - name: Save cache for Maven dependencies
        if: steps.restore-cache.outputs.cache-hit != 'true'
        uses: actions/cache/save@v4
        with:
          path: |
            ~/.m2/repository/*/*/*
            !~/.m2/repository/org/apache/ozone
          key: maven-repo-${{ hashFiles('**/pom.xml') }}
12 changes: 4 additions & 8 deletions .github/workflows/repeat-acceptance.yml
Original file line number Diff line number Diff line change
Expand Up @@ -91,9 +91,11 @@ jobs:
restore-keys: |
${{ runner.os }}-pnpm-
- name: Cache for maven dependencies
uses: actions/cache@v4
uses: actions/cache/restore@v4
with:
path: ~/.m2/repository
path: |
~/.m2/repository/*/*/*
!~/.m2/repository/org/apache/ozone
key: maven-repo-${{ hashFiles('**/pom.xml') }}-${{ env.JAVA_VERSION }}
restore-keys: |
maven-repo-${{ hashFiles('**/pom.xml') }}
Expand All @@ -115,12 +117,6 @@ jobs:
hadoop-ozone/dist/target/ozone-*.tar.gz
!hadoop-ozone/dist/target/ozone-*-src.tar.gz
retention-days: 1
- name: Delete temporary build artifacts before caching
run: |
#Never cache local artifacts
rm -rf ~/.m2/repository/org/apache/ozone/hdds*
rm -rf ~/.m2/repository/org/apache/ozone/ozone*
if: always()
acceptance:
needs:
- prepare-job
Expand Down
9 changes: 6 additions & 3 deletions SECURITY.md
Original file line number Diff line number Diff line change
Expand Up @@ -5,13 +5,16 @@
The first stable release of Apache Ozone is 1.0, the previous alpha and beta releases are not supported by the community.

| Version | Supported |
| ------------- | ------------------ |
|---------------| ------------------ |
| 0.3.0 (alpha) | :x: |
| 0.4.0 (alpha) | :x: |
| 0.4.1 (alpha) | :x: |
| 0.5.0 (beta) | :x: |
| 1.0 | :white_check_mark: |
| 1.1 | :white_check_mark: |
| 1.0.0 | :x: |
| 1.1.0 | :x: |
| 1.2.1 | :x: |
| 1.3.0 | :x: |
| 1.4.0 | :white_check_mark: |

## Reporting a Vulnerability

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -224,7 +224,7 @@ public enum ChecksumCombineMode {
private String fsDefaultBucketLayout = "FILE_SYSTEM_OPTIMIZED";

@PostConstruct
private void validate() {
public void validate() {
Preconditions.checkState(streamBufferSize > 0);
Preconditions.checkState(streamBufferFlushSize > 0);
Preconditions.checkState(streamBufferMaxSize > 0);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -39,6 +39,11 @@ public void write(@Nonnull byte[] byteArray) throws IOException {
write(ByteBuffer.wrap(byteArray));
}

@Override
// Sub-range write: forwards to the ByteBuffer-based overload so all byte[]
// writes funnel through a single code path (added so range writes no longer
// fall back to the slow one-byte-at-a-time OutputStream default).
// NOTE(review): assumes the (ByteBuffer, int, int) overload interprets
// off/len as offsets into the wrapped buffer — confirm against its contract.
public void write(@Nonnull byte[] byteArray, int off, int len) throws IOException {
write(ByteBuffer.wrap(byteArray), off, len);
}

@Override
public void write(int b) throws IOException {
write(new byte[]{(byte) b});
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,39 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdds.scm;

import org.apache.hadoop.hdds.conf.OzoneConfiguration;
import org.junit.jupiter.api.Test;

import static org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_CLIENT_BYTES_PER_CHECKSUM_MIN_SIZE;
import static org.junit.jupiter.api.Assertions.assertEquals;

/**
 * Verifies how {@code OzoneClientConfig} handles a bytes-per-checksum value
 * that was configured as a plain integer, i.e. without a size suffix.
 */
class TestOzoneClientConfig {

  @Test
  void missingSizeSuffix() {
    // 1024 plain bytes — presumably below the allowed minimum, so the
    // loaded config is expected to fall back to the minimum size.
    final int rawByteCount = 1024;

    OzoneConfiguration configuration = new OzoneConfiguration();
    configuration.setInt("ozone.client.bytes.per.checksum", rawByteCount);

    OzoneClientConfig loaded = configuration.getObject(OzoneClientConfig.class);

    assertEquals(OZONE_CLIENT_BYTES_PER_CHECKSUM_MIN_SIZE, loaded.getBytesPerChecksum());
  }
}
Original file line number Diff line number Diff line change
Expand Up @@ -18,13 +18,15 @@
*/
package org.apache.hadoop.hdds.utils;

import org.apache.ratis.util.UncheckedAutoCloseable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.lang.ref.ReferenceQueue;
import java.util.Collections;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicLong;

/**
* Simple general resource leak detector using {@link ReferenceQueue} and {@link java.lang.ref.WeakReference} to
Expand All @@ -37,7 +39,7 @@
* class MyResource implements AutoClosable {
* static final LeakDetector LEAK_DETECTOR = new LeakDetector("MyResource");
*
* private final LeakTracker leakTracker = LEAK_DETECTOR.track(this, () -> {
* private final UncheckedAutoCloseable leakTracker = LEAK_DETECTOR.track(this, () -> {
* // report leaks, don't refer to the original object (MyResource) here.
* System.out.println("MyResource is not closed before being discarded.");
* });
Expand All @@ -53,13 +55,14 @@
* }</pre>
*/
public class LeakDetector {
public static final Logger LOG = LoggerFactory.getLogger(LeakDetector.class);
private static final Logger LOG = LoggerFactory.getLogger(LeakDetector.class);
private static final AtomicLong COUNTER = new AtomicLong();
private final ReferenceQueue<Object> queue = new ReferenceQueue<>();
private final Set<LeakTracker> allLeaks = Collections.newSetFromMap(new ConcurrentHashMap<>());
private final String name;

public LeakDetector(String name) {
this.name = name;
this.name = name + COUNTER.getAndIncrement();
start();
}

Expand Down Expand Up @@ -89,7 +92,7 @@ private void run() {
LOG.warn("Exiting leak detector {}.", name);
}

public LeakTracker track(Object leakable, Runnable reportLeak) {
public UncheckedAutoCloseable track(Object leakable, Runnable reportLeak) {
// A rate filter can be put here to only track a subset of all objects, e.g. 5%, 10%,
// if we have proofs that leak tracking impacts performance, or a single LeakDetector
// thread can't keep up with the pace of object allocation.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,8 @@
*/
package org.apache.hadoop.hdds.utils;

import org.apache.ratis.util.UncheckedAutoCloseable;

import java.lang.ref.ReferenceQueue;
import java.lang.ref.WeakReference;
import java.util.Set;
Expand All @@ -27,7 +29,7 @@
*
* @see LeakDetector
*/
public class LeakTracker extends WeakReference<Object> {
final class LeakTracker extends WeakReference<Object> implements UncheckedAutoCloseable {
private final Set<LeakTracker> allLeaks;
private final Runnable leakReporter;
LeakTracker(Object referent, ReferenceQueue<Object> referenceQueue,
Expand All @@ -40,6 +42,7 @@ public class LeakTracker extends WeakReference<Object> {
/**
* Called by the tracked resource when closing.
*/
@Override
public void close() {
allLeaks.remove(this);
}
Expand Down
Loading