So berechnen Sie den Durchschnitt eines Arrays in JavaScript
// Mean of a numeric array; an empty array yields NaN (0 / 0).
const arrAvg = (arr) => {
  let total = 0;
  for (const value of arr) {
    total += value;
  }
  return total / arr.length;
};
TechWhizKid
// Average of the numbers in `arr`; returns NaN for an empty array (0 / 0).
const arrAvg = (arr) => {
  const sum = arr.reduce((acc, x) => acc + x, 0);
  return sum / arr.length;
};
// Sum a hard-coded list of scores, then print their mean.
const scores = [20, 50, 75, 100, 115];
let total = 0;
for (const score of scores) {
  total += score;
}
console.log(total / scores.length);
/**
 * Compute the arithmetic mean of a numeric array.
 *
 * Fix: the original called `reduce` without an initial value, which throws
 * `TypeError: Reduce of empty array with no initial value` for `[]`.
 * Supplying 0 as the initial value makes the empty case return NaN (0 / 0)
 * instead of throwing, and leaves all non-empty results unchanged.
 *
 * @param {number[]} arr - values to average
 * @returns {number} mean of the values; NaN when `arr` is empty
 */
const avg = arr => {
  const sum = arr.reduce((acc, cur) => acc + cur, 0);
  const average = sum / arr.length;
  return average;
}
console.log(avg([1, 2, 3, 7, 8]));
// Accumulate every entry of `grades` (assumed defined earlier — not visible
// here) and derive the mean into `avg`.
var total = 0;
for (const grade of grades) {
  total += grade;
}
var avg = total / grades.length;
/**
 * Average an arbitrary number of numeric arguments via rest parameters.
 *
 * Fix: the original `reduce` had no initial value, so calling `average()`
 * with zero arguments threw a TypeError. With 0 as the initial value the
 * zero-argument call returns NaN (0 / 0); all other results are unchanged.
 *
 * @param {...number} args - numbers to average
 * @returns {number} mean of the arguments; NaN when none are given
 */
const average = (...args) => args.reduce((a, b) => a + b, 0) / args.length;
// Example
average(1, 2, 3, 4); // 2.5
// Total the entries of `times` (assumed defined earlier — not visible here)
// and log both the sum and the mean. The `|| 0` keeps the average at 0 when
// `times` is empty (0 / 0 would otherwise be NaN).
let runningTotal = 0;
for (const t of times) {
  runningTotal += t;
}
const sum = runningTotal;
const avg = (sum / times.length) || 0;
console.log(`The sum is: ${sum}. The average is: ${avg}.`);